diff --git a/.replit b/.replit new file mode 100644 index 0000000..fa3d4b6 --- /dev/null +++ b/.replit @@ -0,0 +1,105 @@ +# The command that runs the program. If the interpreter field is set, it will have priority and this run command will do nothing +run = "python3 main.py" + +# The primary language of the repl. There can be others, though! +language = "python3" +entrypoint = "main.py" +# A list of globs that specify which files and directories should +# be hidden in the workspace. +hidden = ["venv", ".config", "**/__pycache__", "**/.mypy_cache", "**/*.pyc"] + +# Specifies which nix channel to use when building the environment. +[nix] +channel = "stable-21_11" + +# The command to start the interpreter. +[interpreter] + [interpreter.command] + args = [ + "stderred", + "--", + "prybar-python3", + "-q", + "--ps1", + "\u0001\u001b[33m\u0002\u0001\u001b[00m\u0002 ", + "-i", + ] + env = { LD_LIBRARY_PATH = "$PYTHON_LD_LIBRARY_PATH" } + +[env] +VIRTUAL_ENV = "/home/runner/${REPL_SLUG}/venv" +PATH = "${VIRTUAL_ENV}/bin" +PYTHONPATH = "${VIRTUAL_ENV}/lib/python3.10/site-packages" +REPLIT_POETRY_PYPI_REPOSITORY = "https://package-proxy.replit.com/pypi/" +MPLBACKEND = "TkAgg" +POETRY_CACHE_DIR = "${HOME}/${REPL_SLUG}/.cache/pypoetry" + +# Enable unit tests. This is only supported for a few languages. +[unitTest] +language = "python3" + +# Add a debugger! +[debugger] +support = true + + # How to start the debugger. + [debugger.interactive] + transport = "localhost:0" + startCommand = ["dap-python", "main.py"] + + # How to communicate with the debugger. + [debugger.interactive.integratedAdapter] + dapTcpAddress = "localhost:0" + + # How to tell the debugger to start a debugging session. + [debugger.interactive.initializeMessage] + command = "initialize" + type = "request" + + [debugger.interactive.initializeMessage.arguments] + adapterID = "debugpy" + clientID = "replit" + clientName = "replit.com" + columnsStartAt1 = true + linesStartAt1 = true + locale = "en-us" + pathFormat = "path" + supportsInvalidatedEvent = true + supportsProgressReporting = true + supportsRunInTerminalRequest = true + supportsVariablePaging = true + supportsVariableType = true + + # How to tell the debugger to start the debuggee application. + [debugger.interactive.launchMessage] + command = "attach" + type = "request" + + [debugger.interactive.launchMessage.arguments] + logging = {} + +# Configures the packager. +[packager] +language = "python3" +ignoredPackages = ["unit_tests"] + + [packager.features] + enabledForHosting = false + # Enable searching packages from the sidebar. + packageSearch = true + # Enable guessing what packages are needed from the code. 
+  guessImports = true
+
+# These are the files that need to be preserved when this
+# language template is used as the base language template
+# for Python repos imported from GitHub
+[gitHubImport]
+requiredFiles = [".replit", "replit.nix", ".config", "venv"]
+
+[languages]
+
+[languages.python3]
+pattern = "**/*.py"
+
+[languages.python3.languageServer]
+start = "pylsp"
diff --git a/BMI Calculator using Python.py b/BMI Calculator using Python.py
new file mode 100644
index 0000000..2cc1174
--- /dev/null
+++ b/BMI Calculator using Python.py
@@ -0,0 +1,14 @@
+# Program to calculate BMI
+
+# Getting inputs (height in metres, weight in kilograms):
+height = float(input('Enter height: '))
+weight = float(input('Enter weight: '))
+
+# Function to calculate BMI
+def BMICalculator(height, weight):
+    bmi = weight/(height**2)
+    return bmi
+
+# Calling the BMI function
+bmi = BMICalculator(height, weight)
+print("The BMI is", format(bmi))
diff --git a/BMI.py b/BMI.py
new file mode 100644
index 0000000..a450dda
--- /dev/null
+++ b/BMI.py
@@ -0,0 +1,64 @@
+def input_variables():
+
+    print("Name:")
+    name = input("Enter the name of the person.")
+
+    print("Age:")
+    age = int(input("Enter the age of the person."))
+
+    print("Sex:")
+    sex = input("Enter sex: M for male, F for female, O for non binary.")
+    while sex != 'M' and sex != 'F' and sex != 'O':
+        print("Incorrect choice.")
+        sex = input("Enter sex: M for male, F for female, O for non binary.")
+
+    print("Weight:")
+    unit_weight = input("Press 1 for kilograms, 2 for pounds.")
+    while unit_weight != '1' and unit_weight != '2':
+        print("Incorrect choice.")
+        unit_weight = input("Press 1 for kilograms, 2 for pounds.")
+    if unit_weight == '1':
+        weight_in_kg = float(input("Enter the weight of the person in kilograms."))
+    else:
+        weight_in_kg = float(input("Enter the weight of the person in pounds."))*0.453592
+
+    print("Height:")
+    unit_height = input("Press 1 for meters and centimeters, 2 for feet and inches")
+    while unit_height != '1' and unit_height != '2':
+        print("Incorrect choice.")
+        unit_height = input("Press 1 for meters and centimeters, 2 for feet and inches")
+    if unit_height == '1':
+        height_in_m = float(input("Enter the height of the person in meters.")) + float(input("Enter the remaining height of the person in centimeters"))*0.01
+    else:
+        height_in_m = (float(input("Enter the height of the person in feet"))*12 + float(input("Enter the remaining height of the person in inches")))*0.0254
+
+    return name, age, sex, weight_in_kg, height_in_m
+
+def calculate_BMI(info):
+    wt = info[3]
+    ht = info[4]
+    BMI = wt/(ht**2)
+
+    return BMI
+
+def find_body_state(BMI):
+    if BMI < 18.5:
+        body_state = 'underweight'
+    elif BMI < 24.9:
+        body_state = 'normal'
+    elif BMI < 29.9:
+        body_state = 'overweight'
+    else:
+        body_state = 'obese'
+
+    return body_state
+
+def output():
+    info = input_variables()
+    BMI = calculate_BMI(info)
+    body_state = find_body_state(BMI)
+
+    print("Name: {}\nAge: {}\nGender: {}\nWeight(in kg): {:.3f}\nHeight(in m): {:.2f}\nBody Mass Index: {:.2f}\nAccording to your body mass index, you are {}.".format(info[0], info[1], info[2], info[3], info[4], BMI, body_state))
+
+output()
+ 
\ No newline at end of file
diff --git a/DegreetoFahrenheit.py b/DegreetoFahrenheit.py
new file mode 100644
index 0000000..d80584d
--- /dev/null
+++ b/DegreetoFahrenheit.py
@@ -0,0 +1,3 @@
+celsius = float(input("Enter temperature in celsius: "))
+fahrenheit = (celsius * 9/5) + 32
+print('%.2f Celsius is: %0.2f Fahrenheit' %(celsius, fahrenheit))
\ No newline at end of file
diff --git a/SudokuSolver.py b/SudokuSolver.py
new file mode 100644
index 0000000..d1fc4d5
--- /dev/null
+++ b/SudokuSolver.py
@@ -0,0 +1,85 @@
+'''
+Help taken from: https://www.youtube.com/watch?v=tvP_FZ-D9Ng&t=793s (Kylie Ying)
+Here I am trying to solve a Sudoku with the help of programming
+
+0 means that the cell is blank
+'''
+
+
+import numpy as np
+
+class Solve_sudoku:
+    # this function takes a plain list as its argument and converts it to a NumPy array
+    # it does not return anything
+    def __init__(self, matrix: list) -> None:
+        self.Array = np.array(matrix)  # convert the plain list to a NumPy array
+
+    def blank(self):
+        array = self.Array
+        row_count = len(array)  # the number of rows
+
+        col_count = len(array[0])  # the number of columns
+
+        for i in range(row_count):
+            for k in range(col_count):  # traverse the elements of the array
+                if array[i][k] == 0:
+                    return i, k  # return the coordinates of the blank cell
+        # no blank cell is left anywhere in the grid
+        return None, None
+
+    def is_valid(self, number: int, row: int, col: int) -> bool:
+        # our array
+        array = self.Array
+        # row
+        if number in array[row]:  # the guessed number is already present in the row
+            return False
+
+        if number in array.transpose()[col]:  # the guessed number is already present in the column
+            return False
+
+        # now we have to check the 3 x 3 block
+        # step 1 is to find the starting row and column of the block
+
+        row_start, col_start = (row // 3) * 3, (col // 3) * 3  # top-left corner of the 3 x 3 block this cell belongs to
+
+        for i in range(row_start, row_start + 3):
+            for k in range(col_start, col_start + 3):
+                if array[i][k] == number:
+                    return False
+
+        return True  # all of the above checks passed
+
+
+    def solve(self):
+        array = self.Array  # this is the array that we have to solve
+
+        # Task 1: find a blank cell in the puzzle
+
+        row, column = self.blank()  # the coordinates of the blank in the matrix (row, column)
+
+        if row is None:
+            return True, array
+
+        for guess in range(1, 10):
+
+            if self.is_valid(guess, row, column):  # if the number is valid for this cell, write it into the array
+                array[row][column] = guess
+
+                if self.solve()[0]:
+                    return True, array
+
+                array[row][column] = 0
+        return False, array
+
+grid = Solve_sudoku([
+    [7, 2, 0, 5, 0, 0, 0, 0, 0],
+    [5, 0, 0, 0, 4, 0, 6, 0, 0],
+    [0, 8, 0, 1, 0, 0, 0, 0, 0],
+    [0, 0, 0, 0, 0, 3, 0, 0, 9],
+    [0, 4, 3, 0, 0, 0, 0, 0, 0],
+    [0, 0, 0, 0, 0, 6, 0, 5, 0],
+    [0, 0, 1, 6, 0, 0, 4, 0, 7],
+    [0, 0, 0, 0, 0, 9, 0, 0, 0],
+    [0, 9, 0, 0, 0, 0, 0, 2, 0]
+    ])  # the Sudoku to solve
+print(grid.solve()[1])
\ No newline at end of file
diff --git a/alphabetpyramid.py b/alphabetpyramid.py
new file mode 100644
index 0000000..3ff4dc4
--- /dev/null
+++ b/alphabetpyramid.py
@@ -0,0 +1,8 @@
+rows=int(input("Enter number of rows:"))
+ascii_value=65
+for i in range(rows):
+    for j in range(i+1):
+        alphabet=chr(ascii_value)
+        print(alphabet,end=" ")
+        ascii_value=ascii_value+1
+    print("\n")
diff --git a/code-for-body-mass-index.py b/code-for-body-mass-index.py
new file mode 100644
index 0000000..8383a64
--- /dev/null
+++ b/code-for-body-mass-index.py
@@ -0,0 +1,53 @@
+# Code by Rajkumar Dake
+# Code in Python for body mass index
+print('\t\t\t BMI Calculator')
+print('\t\t\t By Abdinasir Hussein')
+print('\n Hello, this is a BMI Calculator!')
+
+units = input('Do you wish to enter metric units or imperial units: ')
+
+if units == 'metric':
+    height = float(input('Please enter your height in meters (decimals): '))
+    weight = int(input('Please enter your weight in kg: '))
+    bmi = weight/(height*height)
+
+    if bmi <= 18.5:
+        print('Your BMI is', bmi, 'which means you are underweight.')
+
+    elif bmi > 18.5 and bmi < 25:
+        print('Your BMI is', bmi, 'which means you are normal.')
+
+    elif bmi >= 25 and bmi < 30:
+        print('Your BMI is', bmi, 'which means you are overweight.')
+
+    elif bmi >= 30:
+        print('Your BMI is', bmi, 'which means you are obese.')
+
+    else:
+        print('There is an error with your input')
+        print('Please check you have entered whole numbers\n'
+              'and decimals where asked.')
+
+elif units == 'imperial':
+    height = int(input('Please enter your height in inches (whole number): '))
+    weight = int(input('Please enter your weight in pounds (whole number): '))
+    bmi = (weight*703)/(height*height)
+
+    if bmi <= 18.5:
+        print('Your BMI is', bmi, 'which means you are underweight.')
+
+    elif bmi > 18.5 and bmi < 25:
+        print('Your BMI is', bmi, 'which means you are normal.')
+
+    elif bmi >= 25 and bmi < 30:
+        print('Your BMI is', bmi, 'which means you are overweight.')
+
+    elif bmi >= 30:
+        print('Your BMI is', bmi, 'which means you are obese.')
+
+    else:
+        print('There is an error with your input')
+        print('Please check you have entered whole numbers\n'
+              'and decimals where asked.')
+
+input('\n\nPlease press enter to exit.')
\ No newline at end of file
diff --git a/draw.py b/draw.py
new file mode 100644
index 0000000..8defedb
--- /dev/null
+++ b/draw.py
@@ -0,0 +1,15 @@
+from turtle import *
+bgcolor("black")
+speed(0)
+hideturtle()
+
+for i in range(50,400):
+    color("red")
+    circle(i)
+    color("orange")
+    circle(i*0.8)
+    color("white")
+    circle(i*0.6)
+    right(3)
+
+done()
diff --git a/factorial.py b/factorial.py
new file mode 100644
index 0000000..90ca107
--- /dev/null
+++ b/factorial.py
@@ -0,0 +1,12 @@
+# input value from the user
+n=int(input('enter no: '))
+
+# initialise the factorial accumulator, f=1
+f=1
+
+# loop until the user input value
+for i in range(n):
+    f=f*(i+1)
+
+# print the output/factorial
+print ('Factorial: ',f)
diff --git a/leapyear.py b/leapyear.py
index f6a330b..43cd463 100644
--- a/leapyear.py
+++ b/leapyear.py
@@ -1,13 +1,29 @@
-def CheckLeap(Year):
-    # Checking if the given year is leap year
-    if((Year % 400 == 0) or
-       (Year % 100 != 0) and
-       (Year % 4 == 0)):
-        print("Given Year is a leap Year");
-    # Else it is not a leap year
-    else:
-        print ("Given Year is not a leap Year")
-# Taking an input year from user
-Year = int(input("Enter the number: "))
-# Printing result
-CheckLeap(Year)
+##############main#####################
+def CheckLeap(Year):
+    # Checking if the given year is leap year
+    if((Year % 400 == 0) or
+       (Year % 100 != 0) and
+       (Year % 4 == 0)):
+        print("Given Year is a leap Year")
+    # Else it is not a leap year
+    else:
+        print("Given Year is not a leap Year")
+# Taking an input year from user
+Year = int(input("Enter the number: "))
+# Printing result
+CheckLeap(Year)
+################################
+def CheckLeap(Year):
+    # Checking if the given year is leap year
+    if((Year % 400 == 0) or
+       (Year % 100 != 0) and
+       (Year % 4 == 0)):
+        print("Given Year is a leap Year")
+    # Else it is not a leap year
+    else:
+        print("Given Year is not a leap Year")
+# Taking an input year from user
+Year = int(input("Enter the number: "))
+# Printing result
+CheckLeap(Year)
+##############main#####################
\ No newline at end of file
diff --git a/listpallindrome.py b/listpallindrome.py
new file mode 100644
index 0000000..2ba3d13
--- /dev/null
+++ b/listpallindrome.py
@@ -0,0 +1,5 @@
+string = input("Enter a letter:")
+if(string == string[::-1]):
+    print("The letter is palindrome")
+else:
+    
print("The letter is not pallindrome") diff --git a/pratice_iof_thread11.py b/pratice_iof_thread11.py new file mode 100644 index 0000000..519a1a8 --- /dev/null +++ b/pratice_iof_thread11.py @@ -0,0 +1,30 @@ + + +from collections import deque +import threading +import time +class queue: + def __init__(self): + self.box=deque() + def enque(self,data): + self.box.appendleft(data) + def deque(self): + print(self.box.pop()) + +od=queue() +items=["Beef","salad","rice","mutton"] +ln=len(items) +for data in range (0,ln): + + thrd1=threading.Thread(target=od.enque,args=(items[data],)) + thrd2=threading.Thread(target=od.deque ,args=()) + + time.sleep(0.5) + + thrd1.start() + + time.sleep(2) + + thrd2.start() + + time.sleep(1) \ No newline at end of file diff --git a/primenumber.py b/primenumber.py new file mode 100644 index 0000000..3e5ff1a --- /dev/null +++ b/primenumber.py @@ -0,0 +1,11 @@ +num=int(input("Enter a number: ")) +flag =False +if num>1: + for i in range(2,num): + if(num%i)==0: + flag = True + break +if flag: + print(num,"Is not a prime number") +else: + print(num,"is a prime number") diff --git a/python sudoku solver/suduko_solver/Generator.py b/python sudoku solver/suduko_solver/Generator.py new file mode 100644 index 0000000..27730f2 --- /dev/null +++ b/python sudoku solver/suduko_solver/Generator.py @@ -0,0 +1,137 @@ +import pySudoku +import random +import time + +def toString(s): + + output = "" + for i in range(9): + for j in range(9): + output += str(s[i][j]) + return output + "\n" + +def checkValid(s, row, col): + + block_row = row // 3 + block_col = col // 3 + + # Row and Column + # Ignore blank spots + for m in range(9): + if s[row][m] != 0 and m != col and s[row][m] == s[row][col]: + return False + if s[m][col] != 0 and m != row and s[m][col] == s[row][col]: + return False + + # Block + for m in range(3): + for n in range(3): + newRow = m + block_row*3 + newCol = n + block_col*3 + if s[newRow][newCol] != 0 and newRow != row and newCol != col\ + and s[newRow][newCol ] == s[row][col]: + return False + + return True + +def populateBoard(s, row, col): + + if row == 8 and col == 8: + used = pySudoku.test_cell(s, row, col) + s[row][col] = used.index(0) + return True + + if col == 9: + row = row+1 + col = 0 + + temp = list(range(1, 10)) + random.shuffle(temp) + # Fill Sudoku + for i in range(9): + s[row][col] = temp[i] + if checkValid(s, row, col): + if populateBoard(s, row, col+1): + return True + s[row][col] = 0 + return False + +def DFS_solve(copy_s, row, col): + + num_solutions = 0 + + # Reached the last cells without any error, so there is a solution + if row == 8 and col == 8: + return num_solutions + 1 + + if col == 9: + row = row+1 + col = 0 + + if copy_s[row][col] == 0: + # Used = list of size 10 representing which numbers are possible + # in the puzzle. 0 = possible, 1 = not possible. Ignore index 0. + used = pySudoku.test_cell(copy_s, row, col) + # No possible solutions. Return 0 for number of solutions + if 0 not in used: + return 0 + + while 0 in used: + copy_s[row][col] = used.index(0) + used[used.index(0)] = 1 + num_solutions += DFS_solve(copy_s, row, col+1) + + # Reached here? 
Then we tried 1-9 without success + copy_s[row][col] = 0 + return num_solutions + + num_solutions += DFS_solve(copy_s, row, col+1) + return num_solutions + +def reduce_sudoku(s, difficulty): + + indices = list(range(81)) + random.shuffle(indices) + + while indices: + row = indices[0] // 9 + col = indices[0] % 9 + temp = s[row][col] + s[row][col] = 0 + indices = indices[1:] + + copy_s = [l[:] for l in s] + + pySudoku.initial_try(copy_s) + + for line in copy_s: + if 0 in line: + num_solutions = DFS_solve(copy_s, 0, 0) + # Not a puzzle with a unique solution, so undo the last insertion + if num_solutions > 1: + s[row][col] = temp + if difficulty == "E" or difficulty == "e": + return + break + + return + +def main(): + f = open("SudokuPuzzles.txt", "w") + user_input = int(input("How many Sudoku puzzles would you like to generate?: ")) + difficulty = input("Easy or Difficult puzzles?: (e or d)") + start = time.time() + + for _ in range(user_input): + # 9 x 9 grid of 0s + s = [[0]*9 for _ in range(9)] + + populateBoard(s, 0, 0) + reduce_sudoku(s, difficulty) + output = toString(s) + f.write(output) + + print("{:.2f} seconds to come up with {} Sudoku puzzles.".format(time.time() - start, user_input)) + +if __name__ == '__main__': + main() diff --git a/python sudoku solver/suduko_solver/Sudokus.txt b/python sudoku solver/suduko_solver/Sudokus.txt new file mode 100644 index 0000000..c5da92d --- /dev/null +++ b/python sudoku solver/suduko_solver/Sudokus.txt @@ -0,0 +1,269 @@ +3 0 9 0 0 0 4 0 0 +2 0 0 7 0 9 0 0 0 +0 8 7 0 0 0 0 0 0 +7 5 0 0 6 0 2 3 0 +6 0 0 9 0 4 0 0 8 +0 2 8 0 5 0 0 4 1 +0 0 0 0 0 0 5 9 0 +0 0 0 1 0 6 0 0 7 +0 0 6 0 0 0 1 0 4 + +0 0 0 7 0 4 0 0 5 +0 2 0 0 1 0 0 7 0 +0 0 0 0 8 0 0 0 2 +0 9 0 0 0 6 2 5 0 +6 0 0 0 7 0 0 0 8 +0 5 3 2 0 0 0 1 0 +4 0 0 0 9 0 0 0 0 +0 3 0 0 6 0 0 9 0 +2 0 0 4 0 7 0 0 0 + +200000060000075030048090100000300000300010009000008000001020570080730000090000004 +807000003602080000000200900040005001000798000200100070004003000000040108300000506 +008009320000080040900500007000040090000708000060020000600001008050030000072900100 +600108000009070003005000080310005800000000000007600042090000400400030100000207006 +030010060020004000100003500300090000860000041000070008005900002000100090040080050 +001093000000100004020060370700000005080504060400000008092080010100009000000340600 +040390100005002000100800006003500900000000000006008300700004009000700400009056080 +800020070000500009340700000070000400005301700001000030000004067100006000020050008 +000090200504008006000000005002007010000605000030800900100000000400300709008040000 +000050060102400000350007004004000050000209000090000100500800013000006408030090000 +900340000001000000030000850020501600050000010004903080082000030000000900000072005 +600002090050004030000008001506000000910000084000000905200700000060300040070500002 +300004800010000200600009000000081700020000080003560000000100004004000030008700009 +500000070009500000070039100080070010002090600040010030008250040000006800020000005 +000030900000900008570004000300076040004000100020490007000500063100007000008060000 +500000000040150090000209006207000030600010004050000801800403000030085020000000003 +504000000100300000000006480006002007900107006400600900057800000000001009000000603 +002000079008130000000000840200017000070000030000590002069000000000063100520000400 +040000020605200000000450900010009400400708005002100080007036000000002301080000060 +000307060070900000102000007004006205006000700507200900700000804000001030030509000 
+003002070800003050096000300000078600000409000004610000001000780080900003020300400 +100008003000000260003000540010857009000000000500213080062000400071000000300400007 +500032000000001890000006400000000026009000700480000000007500000041200000000960003 +030074000602003000107000000805060002020000090700020501000000607000200905000510030 +030060900290000000000000710000100480000206000069003000084000000000000035007090020 +080006902010000050900050003000000007005801200300000000600090004030000070407600080 +600008300000000857043000000700010400000723000001040006000000120526000000008300004 +000090005100800000038007040009008000756000823000200600040500790000001008600020000 +100000000009580040025600000090070080008000500010040070000008630070034200000000009 +006005700070000002000608004000050030001807900080060000400901000100000060008700300 +000000306030068000090040010350020100000804000004090067070030090000510070506000000 +090002030050040006000000074000080200010000050004070000930000000800030060020900010 +000005304500020001006040007000009270080000090079100000800030600700050002304600000 +000502700000600001004000200000008046600000009190700000007000500400009000003801000 +009080000060270009000000501006800007000764000400009200103000000200048060000050800 +000306000025000300901000020200000080000814000070000009060000103004000760000407000 +100900074000004020000080056030000700000635000002000090590060000020100000460002001 +000000003000001700650000048020009670000050000078300020790000061004500000200000000 +000000042900040010030005008020370500000020000005091060100700030080010007690000000 +105070000000100204900000070060007000004201600000600030090000001607008000000090803 +000000070034100000700060405502008700000020000008900506301040007000009860020000000 +060000008000100907050030004007051006000000000600490200900040080104003000200000030 +090060000100000806003200007000800019005000200270001000500004700704000003000050060 +009200050380017000000000060400020980000000000038040007040000000000980036020005100 +280001900000000070000060012032008007000040000400500820320070000010000000008900054 +100000007500643000009000000010000008406205309300000070000000200000381004900000005 +010020000060004800004609030000100600050000090002003000030506100009800050000070040 +096800000700050060800041000020000700500000001008000090000160009040090006000007230 +680507000000000200000021800030060105000000000906040080001290000009000000000803097 +600000300205080700090007000400000001000561000900000002000200050008010604003000009 +003006000070200000080010609000800010067030840050007000308090020000001060000400100 +020009708068500000000000000200070004040000030500090001000000000000003290401200050 +090010007000500600703000002000705000820000079000804000400000106005006000600070030 +000630010000809630006004000600000700200050003001000005000700900057908000040065000 +000030005230000000901000340000407090008000400090805000087000506000000072500060000 +349000000000000700000509002200095007001000400800720005100402000008000000000000376 +000009350450002007007000100040080003070000010100040020006000700300500069084900000 +860000200000740005000000016000095600030000090005160000210000000700083000004000039 +410000003030900008000200000003060000508000904000080500000002000100005020800000016 +070090804400200000100005000039070000007000200000050980000600001000009008701080040 +000350060070000054009002000060000008003000600800000070000100900450000080010036000 +860000020000090500020008004904000000000519000000000302300100050002060000040000068 
+000800070047091060130600000000000203000040000605000000000002051060310480050008000 +000001000080052097000800240205007000000000000000900304017008000460790080000600000 +000008000000007029270109038003000060700000005080000100930501087140700000000400000 +010008602000001009000470000000000750035000120086000000000054000900300000602700030 +000004000000500062706010000000400073004090500950002000000060308390007000000900000 +080005020070210000000060300400000001035020490900000003009080000000059080050700060 +700064085800200000000000430184000000000000000000000576027000000000009001410350002 +040020700000008001000001208490000100107000604003000057304200000700900000006080020 +000006708000007400200900030080000010000538000020000050090004006005200000406100000 +005002000200000007010400300060010409800000001103070050004005080900000003000600900 +006000000200908010000100530300007100600000008002400003058006000020801009000000400 +000000504003400002009082000240001900000000000008900076000230800600009700107000000 +690000800008500047002003000000800053000000000970001000000300400240006500001000062 +070090150980070000200300000060408000100000002000609030000003007000060018097040020 +029000000000807300000200800070010050540000018010080090001006000004305000000000230 +400050000030701008006000010007002400500806001003100800050000100600305090000070004 +047000100000100007000908000006500014009080500520001600000809000900003000001000230 +390000000020096000700083100008400000503000204000008500002360001000840070000000056 +025000080030020009900040000012003000400000007000100850000080001500070020040000370 +000306000502040007100000080000700060460000012080009000010000006900050401000902000 +000000007010009680360040000001400700500000002003006900000090035034800010200000000 +005009070370010000000000809000400100200801005007005000602000000000060032050200900 +300800104029700000000050020000000700600902003001000000050030000000005980704008001 +000600900080340000402001000040000508600050007703000010000800201000024030006007000 +000201300000000620800000007008700002005368100300009800400000009079000000001805000 +000200040000500300200010078000000920050060080091000000520040003007001000060008000 +000300509000020060140090000006001020003060400070800900000030048050070000907002000 +040000000900003006530098000000009170000020000053800000000240053700900002000000010 +000026007100000920000000608070100030002040500090008060204000000069000001800470000 +700001058090000710000460000380000000000825000000000042000016000029000060560300001 +000002040008000390000008700902060000050080030000010408003400000015000600020100000 +300060020006500004800090010904000000000783000000000802050070009100006500060030007 +300040690500000000007200001009080000206000305000010400100007900000000003038090007 +480000050000480600060200000001600009200000003500004100000009080004032000090000034 +030001005006400000001057006080000004004803900200000080400210300000009200600500010 +004700009080200007200100080000000630500000001042000000010009002800007060600003400 +006000005700004000083900070000610000040020080000038000070005820000200003600000900 +000000001005036000901082000093000020060000070070000810000810904000340500300000000 +000006015078000900400009000592000080000070000010000256000600002009000470280100000 +503200000106009000000004002007000000805702903000000500700400000000300608000001204 +040200007000070003906000200000059640000040000032160000004000906500080000300001050 +048006009000200086000500000321000005000000000800000497000005000710009000200400730 
+003906000020000400008001005205010006800000003900050102600800300009000070000305200 +800005000007200000005070890070090060500060002040030050098050700000003600000400003 +000019006300007508046000000400000600050684020002000007000000160108700005600350000 +003000067057100000900040000000001308500070009809200000000080004000004720620000100 +030000601000005020000790038005007000100000007000800300780024000040900000903000010 +804001000090800003000020860000000090130000054040000000075040000200008030000900702 +000800000061000004800005007100740003009000800700083005500300009200000360000004000 +000037500800000000091800003000060057000403000140050000700008360000000004006290000 +080000790000306000219000000000024600900000004004780000000000847000507000042000010 +000000410002800000000050030009004060700306004080100700030020000000009200051000000 +005000207000800014000100305020003000010904070000500040608009000230008000701000400 +200008900130000500600720000080004009000000000900600020000043001003000046004100008 +000405002002000000900102806800000410000090000015000007104703005000000700300609000 +300060900020030000901000050000603000040020010000508000060000305000050070002070008 +060000100020065000400072003080009400000030000005400090200940005000620070007000010 +040005103705003000000206009104000030000020000070000208800104000000900805207500040 +080300000400100700003002600000500013007000400360004000009200800006003009000008050 +800000050000406200049050000018030000060020070000040680000060530005308000020000001 +090003007080700000160409000005000900070030060006000800000302041000001050200900080 +000030080024001600005200000060800000040060050000009030000008300002300570010020000 +040350900302006000000000400700060000050030090000010005009000000000900507001078020 +080100000700000600000684970078001000200000007000300540042836000007000009000005060 +004059600800400000051300002500900008030000060400006007900002830000004006002890100 +000027000060000001009080503020300605000000000708004090102030900500000040000840000 +040000000120003000000060930690007000004050600000900084067080000000300061000000090 +903005700500002000100040020700060040000000000030070002060010003000900008001400209 +003800007001090005900400000050100600000000000006007080000003009700020800400006500 +000000080400700009000219005007000830903020106082000900500934000300006001060000000 +040000000000300052159020800060800000010040020000009070008060517420005000000000080 +050600004008000090000700020085091060004000800070580940030002000040000100900006070 +050009000200000400000800016600080300490010065005090004870003000001000002000200050 +000050008000020500500008317900000470000492000035000001197200004004070000600010000 +308400090040010000060000002000501008400090003200708000700000030000080060010007504 +800005000000040097370000006010930000002010300000056010400000021580060000000100003 +500006000003000020800900530000008040200605009060700000017004005030000900000100002 +412800009000030000080402000900000700006000300005000008000108050000060000800005147 +970002000000000600026000034001030000800010003000080900390000580007000000000400076 +000000000200780500400500031506000070000060000010000203630002009009037005000000000 +075042000000000400020060000060100730107000209089003010000010040008000000000790360 +650007000890230000010050000900000106000309000304000008000070020000082097000500061 +000004800007089000130060000084000200500000009002000480000030076000410900006700000 +000008200900150000700200400007000036000509000430000500008006005000085004002700000 
+070008402009000005000734000310007000090000070000600049000426000700000200602300050 +800070090005408000000000403000006020906000507040100000609000000000903800080060001 +200740000040300000360002070009000301000601000805000200030200084000008030000053009 +030009000806007900000000402000040700070050060002030000603000000007500804000200050 +150000020609203000003000090000300070008407600060001000010000400000709108040000052 +904580000208009000000000010005743080000000000090862300050000000000300604000057102 +007200300590000100008000004000003040300704008060100000700000500001000069009002400 +000023500450000000000000713000907020009302600020501000965000000000000084004270000 +130008000060000000002000409000820670000060000015074000907000200000000090000200083 +000000017200006940000034000600000020400805006010000009000760000096300002530000000 +008020100540000000000000790100800003000165000800002004095000000000000028007040500 +070020050005460000803000000000000370900000008041000000000000103000093500020070060 +037002000200410000000900010410000009008000100700000053080005000000026005000700860 +000000001700008000050297080000009420500601008063700000040863050000900003200000000 +700003000069000004000205890010500080008000600020001030056304000100000260000700009 +030900020070600800001500000800000007090000060400000002000001500006007090020003010 +000870000007001050000002760006000007302090608500000300084200000010900200000043000 +002900001010200000300000040028700000400000003000004250090000004000008070600005900 +004010000006203800000000007800700050500000004090006001600000000003508900000090200 +200086000009005000006900705004000070005000200090000400608009100000300500000720004 +000080205008003104000200000700000039000504000280000001000001000401700600305090000 +000870012000000350000206007000000980083090540094000000100904000068000000450067000 +608000015010005000000008600000046071070020040240970000005300000000400030130000807 +006000000500930010000060089003007020001000500080300100360010000070043006000000200 +004000689500000200300900000800103000007402100000509004000004008001000002465000900 +000200030000090508016500000724000000000102000000000286000003170509060000070004000 +970500000300600704000700050800000000015030920000000006030002000706005008000009061 +046200000015070000300005006703000010400010002080000704200300001000090240000001860 +010300000600200004030008070007000800250090013004000500080700090900001008000003060 +000005003800000060104000905090603400000010000002509070503000702070000004600800000 +000560000526004070100000600400092006080000040900410007003000005040900723000027000 +007005000100060309080000700000046000072108690000970000006000010403010007000700200 +008000203600000007100903000000400390000060000054007000000106005300000002805000600 +034000800000002000076030005005090004000207000900080300100050420000700000009000180 +052010860000700000070050000080009040009000100020600050000030090000002000037080520 +005008002000000070700600013400350008070000090300096007820004006040000000100800400 +001009000005300000900001470000900501000080000803006000098700003000005600000400200 +104600500000040200900000800040000050600427009030000070005000001006080000007001905 +908000000045010000007000039030500008000891000700004050610000400000060120000000605 +108000006000200000000034700500000041006000900390000007005680000000002000700000304 +000010020009006004071002009080000003000607000500000080300100640800500100010020000 +300000700602050001000002400070001000100609005000500040008200000200030809001000002 
+570000000600000900000900016200057000080040030000310004920004000007000003000000045 +175600000000002005000080040006020000008506400000030800050010000700200000000003967 +700100000060000013000040800830000190000304000015000034009050000240000070000008002 +000008793000020060050706000049000007005000900100000850000602010020030000581400000 +000002640010000000000806059080004060400105007050900020230609000000000030067500000 +403090601500000390000100000010200009050040060900003020000008000086000005109020706 +000400000000009824010000050800072300300000007005640008060000010459200000000007000 +590060000000000009010009048041000080000403000070000160420700050300000000000080092 +048003007600508010000000500000800059001000800870009000007000000060402003400700920 +000010720900002030640000105704900000000080000000005401407000062030600007056040000 +076000004000270008300001000000400090803000507060002000000100009600035000400000850 +800020070100000060072630000000000240600000003083000000000097810010000004040080009 +007080000000002607800010020006500300090000040004007800010030004509200000000050900 +097030580000000006050000300040300008000256000200001060001000040900000000034020750 +093500800086010070000700000000000001050307090900000000000006000020080930008009250 +000700060001400005500800720000000480000090000037000000045002008900001300020006000 +050040600000030050000005087810003000009000200000500016730400000080070000002060090 +006000027000065000543080000005003006000090000400700300000070182000140000790000400 +000054070000000009007100605630000750080000040045000026309008100800000000010620000 +000630040080209005002000900090000501500000004807000030005000800700903020020071000 +000008050009000700830040020400500001000106000900007006020060095006000300050800000 +000700008650000001070023900047300005001000800500001630009540080400000012700002000 +000900400000708200030000600008000030013207850040000700006000010005603000007002000 +004000060050000800100265070020001000008090100000700090010628005007000030040000900 +004009007000005010700006804500092000009000200000480006903100008080900000400200700 +003000007410020000000009380000530040000000000060097000029400000000010025700000100 +000006000000070213004001090000100084002000100360004000090300800715040000000700000 +807000010000002000030800602002104000700000003000908500904001050000500000060000408 +900005600700000812060000000009070003080040050600080700000000090394000008001200004 +046000030000000900001870400000780001050000020100049000004036800009000000030000510 +000002560000060000500007300790000020206000104010000089007900008000050000043800000 +008609004736100000000080010070000300002040800009000020050090000000002765600507900 +060000709400001062020090000000004007003070400500900000000050080790300004201000070 +700000100030009007000500320000040001095080640800090000051007000200100030007000004 +060930080003005000075400000008000301000040000107000800000002460000500200040086050 +420000050108030070009600000006208000000010000000706300000007800040050709090000032 +200000058010007340604000000040001060000020000020300010000000706035400090860000004 +700005000050000020009002006007000500840020067006000300300600900010000030000100008 +602000500000003000037090080206700300070000050003005602040070190000800000001000803 +004700003100000006003002090001073000900010004000940100080100200500000009600008300 +952006000000000000001700950000070430400090006017050000083009500000000000000400382 +308000040000000080000013002000600470062040530074005000600570000010000000080000609 
+000000006094050007080100050800095700300000009006870004020003070500010640100000000 +007100600003002004004050000300000009020907080700000001000040100900600400005003200 +030000006600005010080004705800600200500000003003007009209100030050900004700000060 +040200007000070500380000060006900700000050000008001900030000058009040000800006030 +000041003000020480000700020307000091008000600690000205020004000065080000700590000 +000004700061500040002000009190040000000060000000030051500000900070002460003700000 +060020009700000000000509340900000803000718000806000004014802000000000002300090050 +800340002600000040000080700000008903008000100702900000001050000020000005300026007 +000002040004000907100460000600000015008030700240000008000071009302000800010500000 +020000040800007000000360007201000650000090000067000109100048000000100003050000020 +067000050800010000009002008420600000006903800000004076100400700000090005080000360 +030060009704080000000005300053000000002040100000000520001200000000050406600010080 +000890403000074050004000000009000075300010002610000900000000700050920000206081000 +790304108000006000000080059030000497000000000217000030470010000000700000302609071 +000901405014070000000000007006007000870020069000300200700000000000040890301605000 +005001002000000030760200084000904600010000090009607000470002069030000000100300800 +007040000500002906000000107300260000040070010000083009905000000203600005000090800 +000600001000200405870305000020000600100080007005000040000407092903001000200003000 +000900400240001000000500360002007030080030070030400800094002000000700086 \ No newline at end of file diff --git a/python sudoku solver/suduko_solver/pySudoku.py b/python sudoku solver/suduko_solver/pySudoku.py new file mode 100644 index 0000000..598da86 --- /dev/null +++ b/python sudoku solver/suduko_solver/pySudoku.py @@ -0,0 +1,125 @@ +import fileinput +import time + +def print_sudoku(s): + + for row in range(9): + for col in range(9): + print(s[row][col], end=' ') + if col+1 == 3 or col+1 == 6: + print(" | ", end=' ') + if row+1 == 3 or row+1 == 6: + print("\n" + "-"*25, end=' ') + print() + print() + +def test_cell(s, row, col): + + used = [0]*10 + used[0] = 1 + block_row = row // 3 + block_col = col // 3 + + # Row and Column + for m in range(9): + used[s[m][col]] = 1; + used[s[row][m]] = 1; + + # Square + for m in range(3): + for n in range(3): + used[s[m + block_row*3][n + block_col*3]] = 1 + + return used + +def initial_try(s): + stuck = False + + while not stuck: + stuck = True + # Iterate through the Sudoku puzzle + for row in range(9): + for col in range(9): + used = test_cell(s, row, col) + # More than one possibility + if used.count(0) != 1: + continue + + for m in range(1, 10): + # If current cell is empty and there is only one possibility + # then fill in the current cell + if s[row][col] == 0 and used[m] == 0: + s[row][col] = m + stuck = False + break + +def DFS_solve(s, row, col): + + if row == 8 and col == 8: + used = test_cell(s, row, col) + if 0 in used: + s[row][col] = used.index(0) + return True + + if col == 9: + row = row+1 + col = 0 + + if s[row][col] == 0: + used = test_cell(s, row, col) + for i in range(1, 10): + if used[i] == 0: + s[row][col] = i + if DFS_solve(s, row, col+1): + return True + + # Reached here? 
Then we tried 1-9 without success + s[row][col] = 0 + return False + + return DFS_solve(s, row, col+1) + +def main(): + start = time.time() + num_puzzles = 0 + s = [] + text = "" + + for line in fileinput.input(): + line = ' '.join(line.split()) + text += line + + while len(text) > 0: + l = [] + + # Get a row of numbers + while len(l) < 9: + if text[0].isdigit(): + l.append(int(text[0])) + text = text[1:] + + # Insert that row into the Sudoku grid + s.append(l) + + if len(s) == 9: + num_puzzles += 1 + print("Puzzle Number {:d}".format(num_puzzles)) + print("Original:") + print_sudoku(s) + + initial_try(s) + for line in s: + if 0 in line: + DFS_solve(s, 0, 0) + break + + print("Solution:") + print_sudoku(s) + + print("="*30) + s = [] + + print("{:.2f} seconds to solve {} puzzles".format(time.time() - start, num_puzzles)) + +if __name__ == "__main__": + main() diff --git a/replit.nix b/replit.nix new file mode 100644 index 0000000..bb4b987 --- /dev/null +++ b/replit.nix @@ -0,0 +1,18 @@ +{ pkgs }: { + deps = [ + pkgs.python38Full + ]; + env = { + PYTHON_LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [ + # Needed for pandas / numpy + pkgs.stdenv.cc.cc.lib + pkgs.zlib + # Needed for pygame + pkgs.glib + # Needed for matplotlib + pkgs.xorg.libX11 + ]; + PYTHONBIN = "${pkgs.python38Full}/bin/python3.8"; + LANG = "en_US.UTF-8"; + }; +} \ No newline at end of file diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 new file mode 100644 index 0000000..2fb3852 --- /dev/null +++ b/venv/bin/Activate.ps1 @@ -0,0 +1,241 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. 
You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. + +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate new file mode 100644 index 0000000..5ee0c70 --- /dev/null +++ b/venv/bin/activate @@ -0,0 +1,76 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/runner/Python/venv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + if [ "x(venv) " != x ] ; then + PS1="(venv) ${PS1:-}" + else + if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then + # special case for Aspen magic directories + # see https://aspen.io/ + PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" + else + PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" + fi + fi + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r +fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh new file mode 100644 index 0000000..b98c3ac --- /dev/null +++ b/venv/bin/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/home/runner/Python/venv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("venv" != "") then + set env_name = "venv" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see https://aspen.io/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish new file mode 100644 index 0000000..610161c --- /dev/null +++ b/venv/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/home/runner/Python/venv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? + if test -n "(venv) " + printf "%s%s" "(venv) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see https://aspen.io/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/venv/bin/doesitcache b/venv/bin/doesitcache new file mode 100755 index 0000000..bbc98fd --- /dev/null +++ b/venv/bin/doesitcache @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from cachecontrol._cmd import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/f2py b/venv/bin/f2py new file mode 100755 index 0000000..3b487ee --- /dev/null +++ b/venv/bin/f2py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/f2py3 b/venv/bin/f2py3 new file mode 100755 index 0000000..3b487ee --- /dev/null +++ b/venv/bin/f2py3 @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/f2py3.8 b/venv/bin/f2py3.8 new file mode 100755 index 0000000..3b487ee --- /dev/null +++ b/venv/bin/f2py3.8 @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/flask b/venv/bin/flask new file mode 100755 index 0000000..f0a8a00 --- /dev/null +++ b/venv/bin/flask @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from flask.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/keyring b/venv/bin/keyring new file mode 100755 index 0000000..6392208 --- /dev/null +++ b/venv/bin/keyring @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from keyring.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/normalizer b/venv/bin/normalizer new file mode 100755 index 0000000..76e0d54 --- /dev/null +++ b/venv/bin/normalizer @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli.normalizer import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100755 index 0000000..33e407b --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) \ No newline at end of file diff --git a/venv/bin/pip3 b/venv/bin/pip3 new file mode 100755 index 0000000..73491e9 --- /dev/null +++ b/venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/runner/Python/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3.8 b/venv/bin/pip3.8 new file mode 100755 index 0000000..73491e9 --- /dev/null +++ 
b/venv/bin/pip3.8 @@ -0,0 +1,8 @@ +#!/home/runner/Python/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pkginfo b/venv/bin/pkginfo new file mode 100755 index 0000000..c0d15f5 --- /dev/null +++ b/venv/bin/pkginfo @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from pkginfo.commandline import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/poetry b/venv/bin/poetry new file mode 100755 index 0000000..f3e8462 --- /dev/null +++ b/venv/bin/poetry @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from poetry.console import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pyflakes b/venv/bin/pyflakes new file mode 100755 index 0000000..50934b8 --- /dev/null +++ b/venv/bin/pyflakes @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from pyflakes.api import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pylsp b/venv/bin/pylsp new file mode 100755 index 0000000..ba19925 --- /dev/null +++ b/venv/bin/pylsp @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from pylsp.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/python b/venv/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/venv/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 new file mode 100755 index 0000000..8d98b7f --- /dev/null +++ b/venv/bin/python3 @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +LD_LIBRARY_PATH="${PYTHON_LD_LIBRARY_PATH}:${LD_LIBRARY_PATH}" exec -a "$0" "${PYTHONBIN}" "$@" \ No newline at end of file diff --git a/venv/bin/python3.8 b/venv/bin/python3.8 new file mode 120000 index 0000000..4c3f910 --- /dev/null +++ b/venv/bin/python3.8 @@ -0,0 +1 @@ +/home/runner/Python/venv/bin/python3 \ No newline at end of file diff --git a/venv/bin/replit b/venv/bin/replit new file mode 100755 index 0000000..99382ee --- /dev/null +++ b/venv/bin/replit @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from replit.__main__ import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli()) diff --git a/venv/bin/virtualenv b/venv/bin/virtualenv new file mode 100755 index 0000000..73763c2 --- /dev/null +++ b/venv/bin/virtualenv @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from virtualenv.__main__ import run_with_catch +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(run_with_catch()) diff --git a/venv/bin/yapf b/venv/bin/yapf new file mode 100755 index 0000000..78deda5 --- /dev/null +++ b/venv/bin/yapf @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from yapf import run_main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(run_main()) diff --git 
a/venv/bin/yapf-diff b/venv/bin/yapf-diff new file mode 100755 index 0000000..99a641c --- /dev/null +++ b/venv/bin/yapf-diff @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import re +import sys +from yapf.third_party.yapf_diff.yapf_diff import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/LICENSE.txt b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/LICENSE.txt new file mode 120000 index 0000000..5472bbf --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/LICENSE.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/86/ee/ee/87be2a43f3ff1f56496f451f69243926f025fedbb033666c304c4c161b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/METADATA b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/METADATA new file mode 120000 index 0000000..838d82f --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9b/7d/e2/dc2ad8512710e874c92456698b083c5d18eeb270db33f3c05c5d717777 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/RECORD b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/RECORD new file mode 100644 index 0000000..531e720 --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/RECORD @@ -0,0 +1,34 @@ +../../../bin/doesitcache,sha256=1RWmJD4ag9idCYwglvOznVpra4pzgRqnBTbglKKmubo,220 +CacheControl-0.12.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +CacheControl-0.12.11.dist-info/LICENSE.txt,sha256=hu7uh74qQ_P_H1ZJb0UfaSQ5JvAl_tuwM2ZsMExMFhs,558 +CacheControl-0.12.11.dist-info/METADATA,sha256=m33i3CrYUScQ6HTJJFZpiwg8XRjusnDbM_PAXF1xd3c,2218 +CacheControl-0.12.11.dist-info/RECORD,, +CacheControl-0.12.11.dist-info/WHEEL,sha256=WzZ8cwjh8l0jtULNjYq1Hpr-WCqCRgPr--TX4P5I1Wo,110 +CacheControl-0.12.11.dist-info/entry_points.txt,sha256=HjCekaRCv8kfNqP5WehMR29IWxIA5VrhoOeKrCykCLc,56 +CacheControl-0.12.11.dist-info/top_level.txt,sha256=vGYWzpbe3h6gkakV4f7iCK2x3KyK3oMkV5pe5v25-d4,13 +cachecontrol/__init__.py,sha256=hrxlv3q7upsfyMw8k3gQ9vagBax1pYHSGGqYlZ0Zk0M,465 +cachecontrol/__pycache__/__init__.cpython-38.pyc,, +cachecontrol/__pycache__/_cmd.cpython-38.pyc,, +cachecontrol/__pycache__/adapter.cpython-38.pyc,, +cachecontrol/__pycache__/cache.cpython-38.pyc,, +cachecontrol/__pycache__/compat.cpython-38.pyc,, +cachecontrol/__pycache__/controller.cpython-38.pyc,, +cachecontrol/__pycache__/filewrapper.cpython-38.pyc,, +cachecontrol/__pycache__/heuristics.cpython-38.pyc,, +cachecontrol/__pycache__/serialize.cpython-38.pyc,, +cachecontrol/__pycache__/wrapper.cpython-38.pyc,, +cachecontrol/_cmd.py,sha256=HjFJdGgPOLsvS_5e8BvqYrweXJj1gR7dSsqCB1X24uw,1326 +cachecontrol/adapter.py,sha256=du8CsHKttAWL9-pWmSvyNDVzHrH-qfiSOgo6fcanM-0,5021 +cachecontrol/cache.py,sha256=Tty45fOjH40fColTGkqKQvQQmbYsMpk-nCyfLcv2vG4,1535 +cachecontrol/caches/__init__.py,sha256=h-1cUmOz6mhLsjTjOrJ8iPejpGdLCyG4lzTftfGZvLg,242 
+cachecontrol/caches/__pycache__/__init__.cpython-38.pyc,, +cachecontrol/caches/__pycache__/file_cache.cpython-38.pyc,, +cachecontrol/caches/__pycache__/redis_cache.cpython-38.pyc,, +cachecontrol/caches/file_cache.py,sha256=GpexcE29LoY4MaZwPUTcUBZaDdcsjqyLxZFznk8Hbr4,5271 +cachecontrol/caches/redis_cache.py,sha256=bGKAU0IOcJUFmU_blBI3w6zKj1L4IyfDPmLNyzjp_y4,1021 +cachecontrol/compat.py,sha256=JOVKyIibp8nNq3jAbv7nXBsNOtXHVEKPh5u8r2qLGVo,730 +cachecontrol/controller.py,sha256=6jyT4j4LFsIvfDeSFD6xKK15RCOInwE2Nr8Ug7ICtww,16404 +cachecontrol/filewrapper.py,sha256=X4BAQOO26GNOR7nH_fhTzAfeuct2rBQcx_15MyFBpcs,3946 +cachecontrol/heuristics.py,sha256=8kAyuZLSCyEIgQr6vbUwfhpqg9ows4mM0IV6DWazevI,4154 +cachecontrol/serialize.py,sha256=qbMVbnUSDuQRlJWTn6-CKrXYShaUn9dSigtvtAaI3xA,7076 +cachecontrol/wrapper.py,sha256=X3-KMZ20Ho3VtqyVaXclpeQpFzokR5NE8tZSfvKVaB8,774 diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/WHEEL b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/WHEEL new file mode 120000 index 0000000..cd83a08 --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5b/36/7c/7308e1f25d23b542cd8d8ab51e9afe582a824603ebfbe4d7e0fe48d56a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/entry_points.txt new file mode 120000 index 0000000..d1e5058 --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/entry_points.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1e/30/9e/91a442bfc91f36a3f959e84c476f485b1200e55ae1a0e78aac2ca408b7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/top_level.txt new file mode 120000 index 0000000..bd1b302 --- /dev/null +++ b/venv/lib/python3.8/site-packages/CacheControl-0.12.11.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bc/66/16/ce96dede1ea091a915e1fee208adb1dcac8ade8324579a5ee6fdb9f9de \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/LICENSE.rst new file mode 120000 index 0000000..59fa4ff --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/LICENSE.rst @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/48/9a/8e/1108509ed98a37bb983e11e0f7e1d31f0bd8f99a79c8448e7ff37d07ea \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/METADATA new file mode 100644 index 0000000..f644287 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/METADATA @@ -0,0 +1,123 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 2.2.2 +Summary: A simple framework for building complex web applications. 
+Home-page: https://palletsprojects.com/p/flask +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://flask.palletsprojects.com/ +Project-URL: Changes, https://flask.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/flask/ +Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: Werkzeug (>=2.2.2) +Requires-Dist: Jinja2 (>=3.0) +Requires-Dist: itsdangerous (>=2.0) +Requires-Dist: click (>=8.0) +Requires-Dist: importlib-metadata (>=3.6.0) ; python_version < "3.10" +Provides-Extra: async +Requires-Dist: asgiref (>=3.2) ; extra == 'async' +Provides-Extra: dotenv +Requires-Dist: python-dotenv ; extra == 'dotenv' + +Flask +===== + +Flask is a lightweight `WSGI`_ web application framework. It is designed +to make getting started quick and easy, with the ability to scale up to +complex applications. It began as a simple wrapper around `Werkzeug`_ +and `Jinja`_ and has become one of the most popular Python web +application frameworks. + +Flask offers suggestions, but doesn't enforce any dependencies or +project layout. It is up to the developer to choose the tools and +libraries they want to use. There are many extensions provided by the +community that make adding new functionality easy. + +.. _WSGI: https://wsgi.readthedocs.io/ +.. _Werkzeug: https://werkzeug.palletsprojects.com/ +.. _Jinja: https://jinja.palletsprojects.com/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Flask + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + # save this as app.py + from flask import Flask + + app = Flask(__name__) + + @app.route("/") + def hello(): + return "Hello, World!" + +.. code-block:: text + + $ flask run + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) + + +Contributing +------------ + +For guidance on setting up a development environment and how to make a +contribution to Flask, see the `contributing guidelines`_. + +.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst + + +Donate +------ + +The Pallets organization develops and supports Flask and the libraries +it uses. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://flask.palletsprojects.com/ +- Changes: https://flask.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Flask/ +- Source Code: https://github.com/pallets/flask/ +- Issue Tracker: https://github.com/pallets/flask/issues/ +- Website: https://palletsprojects.com/p/flask/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/RECORD new file mode 100644 index 0000000..7b2da9a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/RECORD @@ -0,0 +1,55 @@ +../../../bin/flask,sha256=zlX3XNioVdWkhNN06HgSDjOrwShMB-zWhkA5sJxFe80,212 +Flask-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Flask-2.2.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +Flask-2.2.2.dist-info/METADATA,sha256=UXiwRLD1johd_tGlYOlOKXkJFIG82ehR3bxqh4XWFwA,3889 +Flask-2.2.2.dist-info/RECORD,, +Flask-2.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Flask-2.2.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Flask-2.2.2.dist-info/direct_url.json,sha256=XQThV3oowNgaIZZmWxUJextrVvDo0775OG4dV8O9u8U,247 +Flask-2.2.2.dist-info/entry_points.txt,sha256=s3MqQpduU25y4dq3ftBYD6bMVdVnbMpZP-sUNw0zw0k,41 +Flask-2.2.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 +flask/__init__.py,sha256=Y4mEWqAMxj_Oxq9eYv3tWyN-0nU9yVKBGK_t6BxqvvM,2890 +flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +flask/__pycache__/__init__.cpython-38.pyc,, +flask/__pycache__/__main__.cpython-38.pyc,, +flask/__pycache__/app.cpython-38.pyc,, +flask/__pycache__/blueprints.cpython-38.pyc,, +flask/__pycache__/cli.cpython-38.pyc,, +flask/__pycache__/config.cpython-38.pyc,, +flask/__pycache__/ctx.cpython-38.pyc,, +flask/__pycache__/debughelpers.cpython-38.pyc,, +flask/__pycache__/globals.cpython-38.pyc,, +flask/__pycache__/helpers.cpython-38.pyc,, +flask/__pycache__/logging.cpython-38.pyc,, +flask/__pycache__/scaffold.cpython-38.pyc,, +flask/__pycache__/sessions.cpython-38.pyc,, +flask/__pycache__/signals.cpython-38.pyc,, +flask/__pycache__/templating.cpython-38.pyc,, +flask/__pycache__/testing.cpython-38.pyc,, +flask/__pycache__/typing.cpython-38.pyc,, +flask/__pycache__/views.cpython-38.pyc,, +flask/__pycache__/wrappers.cpython-38.pyc,, +flask/app.py,sha256=VfBcGmEVveMcSajkUmDXCEOvAd-2mIBJ355KicvQ4gE,99025 +flask/blueprints.py,sha256=Jbrt-2jlLiFklC3De9EWBioPtDjHYYbXlTDK9Z7L2nk,26936 +flask/cli.py,sha256=foLlD8NiIXcxpxMmRQvvlZPbVM8pxOaJG3sa58c9dAA,33486 +flask/config.py,sha256=IWqHecH4poDxNEUg4U_ZA1CTlL5BKZDX3ofG4UGYyi0,12584 +flask/ctx.py,sha256=ZOGEWuFjsCIk3vm-C9pLME0e4saeBkeGpr2tTSvemSM,14851 +flask/debughelpers.py,sha256=_RvAL3TW5lqMJeCVWtTU6rSDJC7jnRaBL6OEkVmooyU,5511 +flask/globals.py,sha256=1DLZMi8Su-S1gf8zEiR3JPi6VXYIrYqm8C9__Ly66ss,3187 +flask/helpers.py,sha256=ELq27745jihrdyAP9qY8KENlCVDdnWRWTIn35L9a-UU,25334 +flask/json/__init__.py,sha256=TOwldHT3_kFaXHlORKi9yCWt7dbPNB0ovdHHQWlSRzY,11175 +flask/json/__pycache__/__init__.cpython-38.pyc,, +flask/json/__pycache__/provider.cpython-38.pyc,, +flask/json/__pycache__/tag.cpython-38.pyc,, +flask/json/provider.py,sha256=jXCNypf11PN4ngQjEt6LnSdCWQ1yHIAkNLHlXQlCB-A,10674 +flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857 
+flask/logging.py,sha256=WYng0bLTRS_CJrocGcCLJpibHf1lygHE_pg-KoUIQ4w,2293 +flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +flask/scaffold.py,sha256=tiQRK-vMY5nucoN6pewXF87GaxrltsCGOgTVsT6wm7s,33443 +flask/sessions.py,sha256=66oGlE-v9iac-eb54tFN3ILAjJ1FeeuHHWw98UVaoxc,15847 +flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136 +flask/templating.py,sha256=1P4OzvSnA2fsJTYgQT3G4owVKsuOz8XddCiR6jMHGJ0,7419 +flask/testing.py,sha256=p51f9P7jDc_IDSiZug7jypnfVcxsQrMg3B2tnjlpEFw,10596 +flask/typing.py,sha256=KgxegTF9v9WvuongeF8LooIvpZPauzGrq9ZXf3gBlYc,2969 +flask/views.py,sha256=bveWilivkPP-4HB9w_fOusBz6sHNIl0QTqKUFMCltzE,6738 +flask/wrappers.py,sha256=Wa-bhjNdPPveSHS1dpzD_r-ayZxIYFF1DoWncKOafrk,5695 diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/direct_url.json new file mode 100644 index 0000000..efc238d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, "url": "https://files.pythonhosted.org/packages/0f/43/15f4f9ab225b0b25352412e8daa3d0e3d135fcf5e127070c74c3632c8b4c/Flask-2.2.2-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/entry_points.txt new file mode 120000 index 0000000..47a4075 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/entry_points.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b3/73/2a/42976e536e72e1dab77ed0580fa6cc55d5676cca593feb14370d33c349 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/top_level.txt new file mode 120000 index 0000000..9ac1d61 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/76/f8/ba/e45e807865955344c1a58882d38c8ceb4f855f58091642b7d48290af97 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst new file mode 120000 index 0000000..f3f3fde --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3b/49/dc/ee4105eb37bac10faf1be260408fe85d252b8e9df2e0979fc1e094437b \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA new file mode 120000 index 0000000..0eb7524 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3d/9e/af/d9222274c362c51ecc4545fd7fb656b0fc2d5da9e49ea89952645b8785 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD new file mode 100644 index 0000000..299ed1a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD @@ -0,0 +1,60 @@ +Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539 +Jinja2-3.1.2.dist-info/RECORD,, +Jinja2-3.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Jinja2-3.1.2.dist-info/direct_url.json,sha256=K69RefqBEWYZc2jmGN4sRKonqLehuYqbwRV1o7hAygU,248 +Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59 +Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927 +jinja2/__pycache__/__init__.cpython-38.pyc,, +jinja2/__pycache__/_identifier.cpython-38.pyc,, +jinja2/__pycache__/async_utils.cpython-38.pyc,, +jinja2/__pycache__/bccache.cpython-38.pyc,, +jinja2/__pycache__/compiler.cpython-38.pyc,, +jinja2/__pycache__/constants.cpython-38.pyc,, +jinja2/__pycache__/debug.cpython-38.pyc,, +jinja2/__pycache__/defaults.cpython-38.pyc,, +jinja2/__pycache__/environment.cpython-38.pyc,, +jinja2/__pycache__/exceptions.cpython-38.pyc,, +jinja2/__pycache__/ext.cpython-38.pyc,, +jinja2/__pycache__/filters.cpython-38.pyc,, +jinja2/__pycache__/idtracking.cpython-38.pyc,, +jinja2/__pycache__/lexer.cpython-38.pyc,, +jinja2/__pycache__/loaders.cpython-38.pyc,, +jinja2/__pycache__/meta.cpython-38.pyc,, +jinja2/__pycache__/nativetypes.cpython-38.pyc,, +jinja2/__pycache__/nodes.cpython-38.pyc,, +jinja2/__pycache__/optimizer.cpython-38.pyc,, +jinja2/__pycache__/parser.cpython-38.pyc,, +jinja2/__pycache__/runtime.cpython-38.pyc,, +jinja2/__pycache__/sandbox.cpython-38.pyc,, +jinja2/__pycache__/tests.cpython-38.pyc,, +jinja2/__pycache__/utils.cpython-38.pyc,, +jinja2/__pycache__/visitor.cpython-38.pyc,, +jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 +jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472 +jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061 +jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349 +jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502 +jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509 
+jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 +jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726 +jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207 +jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 +jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226 +jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 +jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 +jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595 +jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476 +jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584 +jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 +jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965 +jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568 diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/direct_url.json new file mode 100644 index 0000000..cd884a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, "url": "https://files.pythonhosted.org/packages/bc/c3/f068337a370801f372f2f8f6bad74a5c140f6fda3d9de154052708dd3c65/Jinja2-3.1.2-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt new file mode 120000 index 0000000..6cc98ee --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cd/17/7a/d9f6ea2327d4a6c46d53b115205ca2bb5b4fc1f80eec4bcf12b9f18131 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt new file mode 120000 index 0000000..18adf25 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3e/47/95/5ad2dbdfe0aa8d68b57cedbd3826e3e791215ff72184aac276b55eff3b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst 
b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst new file mode 120000 index 0000000..59fa4ff --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/48/9a/8e/1108509ed98a37bb983e11e0f7e1d31f0bd8f99a79c8448e7ff37d07ea \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA new file mode 120000 index 0000000..5c6018a --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0c/2f/77/56cce6ce32d072b542851523b56e176d4c234dd6daa65a607656c575bb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD new file mode 100644 index 0000000..bc1092a --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD @@ -0,0 +1,16 @@ +MarkupSafe-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-2.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.1.1.dist-info/METADATA,sha256=DC93VszmzjLQcrVChRUjtW4XbUwjTdbaplpgdlbFdbs,3242 +MarkupSafe-2.1.1.dist-info/RECORD,, +MarkupSafe-2.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +MarkupSafe-2.1.1.dist-info/WHEEL,sha256=paN2rHE-sLfyg0Z4YvQnentMRWXxZnkclRDH8E5J6qk,148 +MarkupSafe-2.1.1.dist-info/direct_url.json,sha256=lu1LRSarKVNGdEHSIPeAB6MRhFGfseT-AhWcoOUVDuk,292 +MarkupSafe-2.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=xfaUQkKNRTdYWe6HnnJ2HjguFmS-C_0H6g8-Q9VAfkQ,9284 +markupsafe/__pycache__/__init__.cpython-38.pyc,, +markupsafe/__pycache__/_native.cpython-38.pyc,, +markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 +markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 +markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so,sha256=gBmi2f9vNFVvJs2gdtjYKwK0tIgrxEqVUMbyL-1roRo,45008 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL new file mode 120000 index 0000000..895e565 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a5/a3/76/ac713eb0b7f283467862f4277a7b4c4565f166791c9510c7f04e49eaa9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/direct_url.json new file mode 100644 index 0000000..c1daa6a --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, "url": 
"https://files.pythonhosted.org/packages/fd/f4/524d2e8f5a3727cf309c2b7df7c732038375322df1376c9e9ef3aa92fcaf/MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt new file mode 120000 index 0000000..303dd63 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ab/2d/0f/9637b9209bafb020637a32728430a310075c0cb2bfd9a81571ec7c67a5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/LICENSE b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/LICENSE new file mode 120000 index 0000000..3f2e66f --- /dev/null +++ b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/70/f6/bf/ca77633c35efa218328eda5486d7054c2914d61820980e378797520a25 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/METADATA new file mode 120000 index 0000000..96b9faa --- /dev/null +++ b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ca/e7/3c/619c4769b5e825bb276fa5ef6474fa6a2b6bad54806325c3d1a3c120cd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/RECORD new file mode 100644 index 0000000..da471cd --- /dev/null +++ b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/RECORD @@ -0,0 +1,21 @@ +SecretStorage-3.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +SecretStorage-3.3.2.dist-info/LICENSE,sha256=cPa_yndjPDXvohgyjtpUhtcFTCkU1hggmA43h5dSCiU,1504 +SecretStorage-3.3.2.dist-info/METADATA,sha256=yuc8YZxHabXoJbsnb6XvZHT6aitrrVSAYyXD0aPBIM0,4016 +SecretStorage-3.3.2.dist-info/RECORD,, +SecretStorage-3.3.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +SecretStorage-3.3.2.dist-info/top_level.txt,sha256=hveSi1OWGaEt3kEVbjmZ0M-ASPxi6y-nTPVa-d3c0B4,14 +secretstorage/__init__.py,sha256=apdh9eMplfOKfwhaczQ5azAzBc_tAfyijZk_wDsUReo,3180 +secretstorage/__pycache__/__init__.cpython-38.pyc,, +secretstorage/__pycache__/collection.cpython-38.pyc,, +secretstorage/__pycache__/defines.cpython-38.pyc,, +secretstorage/__pycache__/dhcrypto.cpython-38.pyc,, +secretstorage/__pycache__/exceptions.cpython-38.pyc,, +secretstorage/__pycache__/item.cpython-38.pyc,, +secretstorage/__pycache__/util.cpython-38.pyc,, +secretstorage/collection.py,sha256=vaep-OCM65Z7xznmH25jQWCJ_fLaZ2RpY5pIdNZJz6U,8408 +secretstorage/defines.py,sha256=x2lf3W2Ks8TV52tldb-Fn1u_Rmd5-AfeXBx2HzeGXy0,824 +secretstorage/dhcrypto.py,sha256=MVeCaOEbjda4YnXWPcq4VU_eIMUvJtGiEsWUCXC7PJo,2417 +secretstorage/exceptions.py,sha256=MEgrgRxY7-iW_QI3FH7WN3JGupA-Fzl8_r1-Y3XwwHU,1576 +secretstorage/item.py,sha256=3NJzakoFWeg59kFjgzGRGlvgg3BgrgT-56QMrFVL9fw,5241 
+secretstorage/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +secretstorage/util.py,sha256=gTGF_trbTfG_CN-iZCvKfjEK1E9drNAYG2DcvZGOxTo,6121 diff --git a/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/top_level.txt new file mode 120000 index 0000000..bf545c4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/SecretStorage-3.3.2.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/86/f7/92/8b539619a12dde41156e3999d0cf8048fc62eb2fa74cf55af9dddcd01e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/LICENSE.rst new file mode 120000 index 0000000..f3f3fde --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/LICENSE.rst @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3b/49/dc/ee4105eb37bac10faf1be260408fe85d252b8e9df2e0979fc1e094437b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/METADATA new file mode 100644 index 0000000..a40cd1b --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/METADATA @@ -0,0 +1,126 @@ +Metadata-Version: 2.1 +Name: Werkzeug +Version: 2.2.2 +Summary: The comprehensive WSGI web application library. 
+Home-page: https://palletsprojects.com/p/werkzeug/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://werkzeug.palletsprojects.com/ +Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/werkzeug/ +Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: MarkupSafe (>=2.1.1) +Provides-Extra: watchdog +Requires-Dist: watchdog ; extra == 'watchdog' + +Werkzeug +======== + +*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +- An interactive debugger that allows inspecting stack traces and + source code in the browser with an interactive interpreter for any + frame in the stack. +- A full-featured request object with objects to interact with + headers, query args, form data, files, and cookies. +- A response object that can wrap other WSGI applications and handle + streaming data. +- A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables + from URLs. +- HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +- A threaded WSGI server for use while developing applications + locally. +- A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug doesn't enforce any dependencies. It is up to the developer to +choose a template engine, database adapter, and even how to handle +requests. It can be used to build all sorts of end user applications +such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. 
code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Donate +------ + +The Pallets organization develops and supports Werkzeug and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://werkzeug.palletsprojects.com/ +- Changes: https://werkzeug.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Werkzeug/ +- Source Code: https://github.com/pallets/werkzeug/ +- Issue Tracker: https://github.com/pallets/werkzeug/issues/ +- Website: https://palletsprojects.com/p/werkzeug/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/RECORD new file mode 100644 index 0000000..2dd672f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/RECORD @@ -0,0 +1,100 @@ +Werkzeug-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Werkzeug-2.2.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Werkzeug-2.2.2.dist-info/METADATA,sha256=hz42ndovEQQy3rwXKZDwR7LA4UNthKegxf_7xIQrjsM,4416 +Werkzeug-2.2.2.dist-info/RECORD,, +Werkzeug-2.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Werkzeug-2.2.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Werkzeug-2.2.2.dist-info/direct_url.json,sha256=FoJYomfozkB3vuoCfDo9EYCyp2suDRKGjub8ZOZZBdA,250 +Werkzeug-2.2.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +werkzeug/__init__.py,sha256=UP218Ddd2NYm1dUhTlhvGRQytzAx1Ms1A716UKiPOYk,188 +werkzeug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/__pycache__/_internal.cpython-38.pyc,, +werkzeug/__pycache__/_reloader.cpython-38.pyc,, +werkzeug/__pycache__/datastructures.cpython-38.pyc,, +werkzeug/__pycache__/exceptions.cpython-38.pyc,, +werkzeug/__pycache__/formparser.cpython-38.pyc,, +werkzeug/__pycache__/http.cpython-38.pyc,, +werkzeug/__pycache__/local.cpython-38.pyc,, +werkzeug/__pycache__/security.cpython-38.pyc,, +werkzeug/__pycache__/serving.cpython-38.pyc,, +werkzeug/__pycache__/test.cpython-38.pyc,, +werkzeug/__pycache__/testapp.cpython-38.pyc,, +werkzeug/__pycache__/urls.cpython-38.pyc,, +werkzeug/__pycache__/user_agent.cpython-38.pyc,, +werkzeug/__pycache__/utils.cpython-38.pyc,, +werkzeug/__pycache__/wsgi.cpython-38.pyc,, +werkzeug/_internal.py,sha256=g8PHJz2z39I3x0vwTvTKbXIg0eUQqGF9UtUzDMWT0Qw,16222 +werkzeug/_reloader.py,sha256=lYStlIDduTxBOB8BSozy_44HQ7YT5fup-x3uuac1-2o,14331 +werkzeug/datastructures.py,sha256=T1SRE_KRuNz9Q7P-Ck4PyKPyil1NOx9zDuNMLgrN1Z0,97083 +werkzeug/datastructures.pyi,sha256=HRzDLc7A6qnwluhNqn6AT76CsLZIkAbVVqxn0AbfV-s,34506 +werkzeug/debug/__init__.py,sha256=Gpq6OpS6mHwHk0mJkHc2fWvvjo6ccJVS9QJwJgoeb9I,18893 +werkzeug/debug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/debug/__pycache__/console.cpython-38.pyc,, +werkzeug/debug/__pycache__/repr.cpython-38.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,, +werkzeug/debug/console.py,sha256=dechqiCtHfs0AQZWZofUC1S97tCuvwDgT0gdha5KwWM,6208 
+werkzeug/debug/repr.py,sha256=FFczy4yhVfEQjW99HuZtUce-ebtJWMjp9GnfasXa0KA,9488 +werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 +werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078 +werkzeug/debug/tbtools.py,sha256=Fsmlk6Ao3CcXm9iX7i_8MhCp2SQZ8uHm8Cf5wacnlW4,13293 +werkzeug/exceptions.py,sha256=5MFy6RyaU4nokoYzdDafloY51QUDIGVNKeK_FORUFS0,26543 +werkzeug/formparser.py,sha256=rLEu_ZwVpvqshZg6E4Qiv36QsmzmCytTijBeGX3dDGk,16056 +werkzeug/http.py,sha256=i_LrIU9KsOz27zfkwKIK6eifFuFMKgSuW15k57HbMiE,42162 +werkzeug/local.py,sha256=1IRMV9MFrauLaZeliF0Md1n7ZOcOKLbS03bnQ8Gz5WY,22326 +werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500 +werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,, +werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,, +werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,, +werkzeug/middleware/__pycache__/lint.cpython-38.pyc,, +werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,, +werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,, +werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,, +werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580 +werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558 +werkzeug/middleware/lint.py,sha256=Sr6gV4royDs6ezkqv5trRAyKMDQ60KaEq3-tQ3opUvw,13968 +werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899 +werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974 +werkzeug/middleware/shared_data.py,sha256=fXjrEkuqxUVLG1DLrOdQLc96QQdjftCBZ1oM5oK89h4,9528 +werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/routing/__init__.py,sha256=HpvahY7WwkLdV4Cq3Bsc3GrqNon4u6t8-vhbb9E5o00,4819 +werkzeug/routing/__pycache__/__init__.cpython-38.pyc,, +werkzeug/routing/__pycache__/converters.cpython-38.pyc,, +werkzeug/routing/__pycache__/exceptions.cpython-38.pyc,, +werkzeug/routing/__pycache__/map.cpython-38.pyc,, +werkzeug/routing/__pycache__/matcher.cpython-38.pyc,, +werkzeug/routing/__pycache__/rules.cpython-38.pyc,, +werkzeug/routing/converters.py,sha256=05bkekg64vLC6mqqK4ddBh589WH9yBsjtW8IJhdUBvw,6968 +werkzeug/routing/exceptions.py,sha256=RklUDL9ajOv2fTcRNj4pb18Bs4Y-GKk4rIeTSfsqkks,4737 +werkzeug/routing/map.py,sha256=XN4ZjzEF1SfMxtdov89SDE-1_U78KVnnobTfnHzqbmE,36757 +werkzeug/routing/matcher.py,sha256=U8xZTB3e5f3TgbkxdDyVuyxK4w72l1lo_b3tdG2zNrc,7122 +werkzeug/routing/rules.py,sha256=v27RaR5H3sIPRdJ_pdEfOBMN6EivFVpmFzJk7aizdyw,31072 +werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/sansio/__pycache__/__init__.cpython-38.pyc,, +werkzeug/sansio/__pycache__/http.cpython-38.pyc,, +werkzeug/sansio/__pycache__/multipart.cpython-38.pyc,, +werkzeug/sansio/__pycache__/request.cpython-38.pyc,, +werkzeug/sansio/__pycache__/response.cpython-38.pyc,, +werkzeug/sansio/__pycache__/utils.cpython-38.pyc,, +werkzeug/sansio/http.py,sha256=9eORg44CDxpmV9i_U_pZ_NR8gdc9UXFCdE7EAP1v-c0,5162 +werkzeug/sansio/multipart.py,sha256=Uyrg2U6s2oft8LXOyuTvFCWTLOEr7INVW8zFTXNwZ7A,9756 
+werkzeug/sansio/request.py,sha256=SiGcx2cz-l81TlCCrKrT2fePqC64hN8fSg5Ig6J6vRs,20175 +werkzeug/sansio/response.py,sha256=UTl-teQDDjovrZMkjj3ZQsHw-JtiFak5JfKEk1_vBYU,26026 +werkzeug/sansio/utils.py,sha256=EjbqdHdT-JZWgjUQaaWSgBUIRprXUkrsMQQqJlJHpVU,4847 +werkzeug/security.py,sha256=vrBofh4WZZoUo1eAdJ6F1DrzVRlYauGS2CUDYpbQKj8,4658 +werkzeug/serving.py,sha256=18pfjrHw8b5UCgPPo1ZEoxlYZZ5UREl4jQ9f8LGWMAo,38458 +werkzeug/test.py,sha256=t7T5G-HciIlv1ZXtlydFVpow0VrXnJ_Y3yyEB7T0_Ww,48125 +werkzeug/testapp.py,sha256=RJhT_2JweNiMKe304N3bF1zaIeMpRx-CIMERdeydfTY,9404 +werkzeug/urls.py,sha256=Q9Si-eVh7yxk3rwkzrwGRm146FXVXgg9lBP3k0HUfVM,36600 +werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420 +werkzeug/utils.py,sha256=OYdB2cZPYYgq3C0EVKMIv05BrYzzr9xdefW0H00_IVo,24936 +werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120 +werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/request.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/response.cpython-38.pyc,, +werkzeug/wrappers/request.py,sha256=UQ559KkGS0Po6HTBgvKMlk1_AsNw5zstzm8o_dRrfdQ,23415 +werkzeug/wrappers/response.py,sha256=c2HUXrrt5Sf8-XEB1fUXxm6jp7Lu80KR0A_tbQFvw1Q,34750 +werkzeug/wsgi.py,sha256=sgkFCzhl23hlSmbvjxbI-hVEjSlPuEBGTDAHmXFcAts,34732 diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/direct_url.json new file mode 100644 index 0000000..4ebe528 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, "url": "https://files.pythonhosted.org/packages/c8/27/be6ddbcf60115305205de79c29004a0c6bc53cec814f733467b1bb89386d/Werkzeug-2.2.2-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/top_level.txt new file mode 120000 index 0000000..dd1a539 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/41/1c/a3/d958f0268424af08f01483a507c5e0debf6e9f436da951d017ed79c5ac \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc new file mode 100644 index 0000000..4d1b352 Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/__pycache__/typing_extensions.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/typing_extensions.cpython-38.pyc new file mode 100644 index 0000000..54fc53b Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/typing_extensions.cpython-38.pyc differ diff 
--git a/venv/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc new file mode 100644 index 0000000..fb9d0fc Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..a06c350 Binary files /dev/null and b/venv/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/__init__.py b/venv/lib/python3.8/site-packages/_distutils_hack/__init__.py new file mode 120000 index 0000000..68775f5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/_distutils_hack/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/32/2f/93/bf23a7b8ccd421cfeeb68129e66dccfc363a40b2d2917aa0e9ef30848f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..0a9d138 Binary files /dev/null and b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/override.cpython-38.pyc b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/override.cpython-38.pyc new file mode 100644 index 0000000..16beffc Binary files /dev/null and b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/override.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/override.py b/venv/lib/python3.8/site-packages/_distutils_hack/override.py new file mode 120000 index 0000000..36122c8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/12/ef/ec/f8d17a5486780aa774b5b6c0e70b56932d8864f35df1eb7a18bb759b3a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/LICENSE.txt b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/LICENSE.txt new file mode 100644 index 0000000..e497a32 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/LICENSE.txt @@ -0,0 +1,13 @@ + Copyright aio-libs contributors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/METADATA b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/METADATA new file mode 100644 index 0000000..3646959 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/METADATA @@ -0,0 +1,253 @@ +Metadata-Version: 2.1 +Name: aiohttp +Version: 3.8.3 +Summary: Async http client/server framework (asyncio) +Home-page: https://github.com/aio-libs/aiohttp +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp +Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html +Project-URL: Docs: RTD, https://docs.aiohttp.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: AsyncIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Requires-Dist: attrs (>=17.3.0) +Requires-Dist: charset-normalizer (<3.0,>=2.0) +Requires-Dist: multidict (<7.0,>=4.5) +Requires-Dist: async-timeout (<5.0,>=4.0.0a3) +Requires-Dist: yarl (<2.0,>=1.0) +Requires-Dist: frozenlist (>=1.1.1) +Requires-Dist: aiosignal (>=1.1.2) +Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7" +Requires-Dist: asynctest (==0.13.0) ; python_version < "3.8" +Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8" +Provides-Extra: speedups +Requires-Dist: aiodns ; extra == 'speedups' +Requires-Dist: Brotli ; extra == 'speedups' +Requires-Dist: cchardet ; (python_version < "3.10") and extra == 'speedups' + +================================== +Async http client/server framework +================================== + +.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg + :height: 64px + :width: 64px + :alt: aiohttp logo + +| + +.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI + :alt: GitHub Actions status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiohttp + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiohttp.svg + :target: https://pypi.org/project/aiohttp + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest + :target: https://docs.aiohttp.org/ + :alt: Latest Read The Docs + +.. 
image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group + :target: https://aio-libs.discourse.group + :alt: Discourse status + +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + + +Key Features +============ + +- Supports both client and server side of HTTP protocol. +- Supports both client and server Web-Sockets out-of-the-box and avoids + Callback Hell. +- Provides Web-server with middlewares and plugable routing. + + +Getting started +=============== + +Client +------ + +To get something from the web: + +.. code-block:: python + + import aiohttp + import asyncio + + async def main(): + + async with aiohttp.ClientSession() as session: + async with session.get('http://python.org') as response: + + print("Status:", response.status) + print("Content-type:", response.headers['content-type']) + + html = await response.text() + print("Body:", html[:15], "...") + + asyncio.run(main()) + +This prints: + +.. code-block:: + + Status: 200 + Content-type: text/html; charset=utf-8 + Body: ... + +Coming from `requests `_ ? Read `why we need so many lines `_. + +Server +------ + +An example using a simple server: + +.. code-block:: python + + # examples/server_simple.py + from aiohttp import web + + async def handle(request): + name = request.match_info.get('name', "Anonymous") + text = "Hello, " + name + return web.Response(text=text) + + async def wshandle(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + async for msg in ws: + if msg.type == web.WSMsgType.text: + await ws.send_str("Hello, {}".format(msg.data)) + elif msg.type == web.WSMsgType.binary: + await ws.send_bytes(msg.data) + elif msg.type == web.WSMsgType.close: + break + + return ws + + + app = web.Application() + app.add_routes([web.get('/', handle), + web.get('/echo', wshandle), + web.get('/{name}', handle)]) + + if __name__ == '__main__': + web.run_app(app) + + +Documentation +============= + +https://aiohttp.readthedocs.io/ + + +Demos +===== + +https://github.com/aio-libs/aiohttp-demos + + +External links +============== + +* `Third party libraries + `_ +* `Built with aiohttp + `_ +* `Powered by aiohttp + `_ + +Feel free to make a Pull Request for adding your link to these pages! + + +Communication channels +====================== + +*aio-libs discourse group*: https://aio-libs.discourse.group + +*gitter chat* https://gitter.im/aio-libs/Lobby + +We support `Stack Overflow +`_. +Please add *aiohttp* tag to your question there. + +Requirements +============ + +- Python >= 3.6 +- async-timeout_ +- attrs_ +- charset-normalizer_ +- multidict_ +- yarl_ +- frozenlist_ + +Optionally you may install the cChardet_ and aiodns_ libraries (highly +recommended for sake of speed). + +.. _charset-normalizer: https://pypi.org/project/charset-normalizer +.. _aiodns: https://pypi.python.org/pypi/aiodns +.. _attrs: https://github.com/python-attrs/attrs +.. _multidict: https://pypi.python.org/pypi/multidict +.. _frozenlist: https://pypi.org/project/frozenlist/ +.. _yarl: https://pypi.python.org/pypi/yarl +.. _async-timeout: https://pypi.python.org/pypi/async_timeout +.. _cChardet: https://pypi.python.org/pypi/cchardet + +License +======= + +``aiohttp`` is offered under the Apache 2 license. + + +Keepsafe +======== + +The aiohttp community would like to thank Keepsafe +(https://www.getkeepsafe.com) for its support in the early days of +the project. 
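As a companion to the client snippet in the vendored README above, here is a minimal, hypothetical sketch of fetching JSON with an explicit timeout, using only APIs that this vendored aiohttp 3.8.3 exports (``ClientSession``, ``ClientTimeout``). The ``fetch_json`` helper name and the httpbin.org URL are illustrative placeholders, not part of the package or of this repository.

.. code-block:: python

    import asyncio

    import aiohttp


    async def fetch_json(url: str) -> dict:
        # Illustrative sketch: reuse a single ClientSession per application
        # and bound the whole request with a total timeout.
        timeout = aiohttp.ClientTimeout(total=10)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(url) as response:
                response.raise_for_status()
                return await response.json()


    if __name__ == "__main__":
        # Placeholder URL; substitute any JSON endpoint.
        data = asyncio.run(fetch_json("https://httpbin.org/json"))
        print(data)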
+ + +Source code +=========== + +The latest developer version is available in a GitHub repository: +https://github.com/aio-libs/aiohttp + +Benchmarks +========== + +If you are interested in efficiency, the AsyncIO community maintains a +list of benchmarks on the official wiki: +https://github.com/python/asyncio/wiki/Benchmarks diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/RECORD new file mode 100644 index 0000000..e825de9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/RECORD @@ -0,0 +1,119 @@ +aiohttp-3.8.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiohttp-3.8.3.dist-info/LICENSE.txt,sha256=n4DQ2311WpQdtFchcsJw7L2PCCuiFd3QlZhZQu2Uqes,588 +aiohttp-3.8.3.dist-info/METADATA,sha256=tGcxB-KseArVRgBQmGjX9g9YQehDdkqeIAeueYZI4-I,7355 +aiohttp-3.8.3.dist-info/RECORD,, +aiohttp-3.8.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +aiohttp-3.8.3.dist-info/WHEEL,sha256=-ijGDuALlPxm3HbhKntps0QzHsi-DPlXqgerYTTJkFE,148 +aiohttp-3.8.3.dist-info/direct_url.json,sha256=cI0bJ0cmrFshYem1JVJziUFicN7pG9XMhiQawMGz0qg,289 +aiohttp-3.8.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 +aiohttp/.hash/_cparser.pxd.hash,sha256=GoFy-KArtO3unhO5IuosMnc-wwcx7yofVZp2gJi_n_Y,121 +aiohttp/.hash/_find_header.pxd.hash,sha256=_mbpD6vM-CVCKq3ulUvsOAz5Wdo88wrDzfpOsMQaMNA,125 +aiohttp/.hash/_helpers.pyi.hash,sha256=Ew4BZDc2LqFwszgZZUHHrJvw5P8HBhJ700n1Ntg52hE,121 +aiohttp/.hash/_helpers.pyx.hash,sha256=5JQ6BlMBE4HnRaCGdkK9_wpL3ZSWpU1gyLYva0Wwx2c,121 +aiohttp/.hash/_http_parser.pyx.hash,sha256=pEF-JTzd1dVYEwfuzUiTtp8aekvrhhOwiFi4vELWcsM,125 +aiohttp/.hash/_http_writer.pyx.hash,sha256=3Qg3T3D-Ud73elzPHBufK0yEu9tP5jsu6g-aPKQY9gE,125 +aiohttp/.hash/_websocket.pyx.hash,sha256=M97f-Yti-4vnE4GNTD1s_DzKs-fG_ww3jle6EUvixnE,123 +aiohttp/.hash/hdrs.py.hash,sha256=KpTaDTcWfiQrW2QPA5glgIfw6o5JC1hsAYZHeFMuBnI,116 +aiohttp/__init__.py,sha256=8k5pIKoY4JIJImY1z9sSjhGNbZzRmPazjF3TcbvDIIw,6870 +aiohttp/__pycache__/__init__.cpython-38.pyc,, +aiohttp/__pycache__/abc.cpython-38.pyc,, +aiohttp/__pycache__/base_protocol.cpython-38.pyc,, +aiohttp/__pycache__/client.cpython-38.pyc,, +aiohttp/__pycache__/client_exceptions.cpython-38.pyc,, +aiohttp/__pycache__/client_proto.cpython-38.pyc,, +aiohttp/__pycache__/client_reqrep.cpython-38.pyc,, +aiohttp/__pycache__/client_ws.cpython-38.pyc,, +aiohttp/__pycache__/connector.cpython-38.pyc,, +aiohttp/__pycache__/cookiejar.cpython-38.pyc,, +aiohttp/__pycache__/formdata.cpython-38.pyc,, +aiohttp/__pycache__/hdrs.cpython-38.pyc,, +aiohttp/__pycache__/helpers.cpython-38.pyc,, +aiohttp/__pycache__/http.cpython-38.pyc,, +aiohttp/__pycache__/http_exceptions.cpython-38.pyc,, +aiohttp/__pycache__/http_parser.cpython-38.pyc,, +aiohttp/__pycache__/http_websocket.cpython-38.pyc,, +aiohttp/__pycache__/http_writer.cpython-38.pyc,, +aiohttp/__pycache__/locks.cpython-38.pyc,, +aiohttp/__pycache__/log.cpython-38.pyc,, +aiohttp/__pycache__/multipart.cpython-38.pyc,, +aiohttp/__pycache__/payload.cpython-38.pyc,, +aiohttp/__pycache__/payload_streamer.cpython-38.pyc,, +aiohttp/__pycache__/pytest_plugin.cpython-38.pyc,, +aiohttp/__pycache__/resolver.cpython-38.pyc,, +aiohttp/__pycache__/streams.cpython-38.pyc,, +aiohttp/__pycache__/tcp_helpers.cpython-38.pyc,, +aiohttp/__pycache__/test_utils.cpython-38.pyc,, +aiohttp/__pycache__/tracing.cpython-38.pyc,, +aiohttp/__pycache__/typedefs.cpython-38.pyc,, 
+aiohttp/__pycache__/web.cpython-38.pyc,, +aiohttp/__pycache__/web_app.cpython-38.pyc,, +aiohttp/__pycache__/web_exceptions.cpython-38.pyc,, +aiohttp/__pycache__/web_fileresponse.cpython-38.pyc,, +aiohttp/__pycache__/web_log.cpython-38.pyc,, +aiohttp/__pycache__/web_middlewares.cpython-38.pyc,, +aiohttp/__pycache__/web_protocol.cpython-38.pyc,, +aiohttp/__pycache__/web_request.cpython-38.pyc,, +aiohttp/__pycache__/web_response.cpython-38.pyc,, +aiohttp/__pycache__/web_routedef.cpython-38.pyc,, +aiohttp/__pycache__/web_runner.cpython-38.pyc,, +aiohttp/__pycache__/web_server.cpython-38.pyc,, +aiohttp/__pycache__/web_urldispatcher.cpython-38.pyc,, +aiohttp/__pycache__/web_ws.cpython-38.pyc,, +aiohttp/__pycache__/worker.cpython-38.pyc,, +aiohttp/_cparser.pxd,sha256=5tE01W1fUWqytcOyldDUQKO--RH0OE1QYgQBiJWh-DM,4998 +aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68 +aiohttp/_headers.pxi,sha256=n701k28dVPjwRnx5j6LpJhLTfj7dqu2vJt7f0O60Oyg,2007 +aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so,sha256=P2bi11aP0JO44PSfu8MfQbRcpujnFMLlg4qJxOKdQ5I,353856 +aiohttp/_helpers.pyi,sha256=ZoKiJSS51PxELhI2cmIr5737YjjZcJt7FbIRO3ym1Ss,202 +aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049 +aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so,sha256=H7HVFIglsG77TAoY8riONXIZBz9gJtTxHMhxlXMepO8,2382200 +aiohttp/_http_parser.pyx,sha256=1u38_ESw5VgSqajx1mnGdO6Hqk0ov9PxeFreHq4ktoM,27336 +aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so,sha256=1CrwAf6ukB-bnc2grlew6P59rznaps1UvXN2_QsLg5Y,331288 +aiohttp/_http_writer.pyx,sha256=aIHAp8g4ZV5kbGRdmZce-vXjELw2M6fGKyJuOdgYQqw,4575 +aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so,sha256=WhkckbcHcqC9PdSMQ_HLmEUCH3aPnGv0drG17Pa1BNk,124128 +aiohttp/_websocket.pyx,sha256=1XuOSNDCbyDrzF5uMA2isqausSs8l2jWTLDlNDLM9Io,1561 +aiohttp/abc.py,sha256=0FhHtbb3W7wRNtJISACN1teP8LZ49553v5Xoh5zNAFQ,5505 +aiohttp/base_protocol.py,sha256=XBq6YcLl7nwKJPIBKf2b1EobE0v7EBeZAoDJCkOO2eo,2676 +aiohttp/client.py,sha256=5dTKnaqzZvbEjd4M6yBOhDDR1uErDKu_xI3xGlzzYjs,45037 +aiohttp/client_exceptions.py,sha256=tiaUIb2xy3s-O-KTPVL6L_P0rpGQT0rV1dujwwgJKoI,9270 +aiohttp/client_proto.py,sha256=c4TAK8CVdycukenCJj7LtlQ3SEj04ilJ3DfmN3kPgeE,8170 +aiohttp/client_reqrep.py,sha256=ULqrco544ZQgYruj1mFD6Fd3af2fOZWJqn07R8rB5J8,36973 +aiohttp/client_ws.py,sha256=Sc0u3S-vZMadtEO6JpbLhVVw7KgtlsgZWHwaSYjkN0I,10516 +aiohttp/connector.py,sha256=0EPxWYzIF3UudRFL77QBVico5EfSMg9mPoy75m1aBhw,51177 +aiohttp/cookiejar.py,sha256=6Y8F3H19q81WypX7EmWR4eQvTc0Pj8QXdv20xM8BUlE,13514 +aiohttp/formdata.py,sha256=q2gpeiM9NFsl_eSFVxHZ7Qez6RbM8_BujERMkooQkx0,6106 +aiohttp/hdrs.py,sha256=owNRw0dgodeDWyobBVLkY88dLbkNoM20czE9xm40oDE,4724 +aiohttp/helpers.py,sha256=PwEnFh_jHJXr6fcM_QwEY3sSibVx8FgjdO37So19lqA,26361 +aiohttp/http.py,sha256=_B20NZc113uNtg0jabY-x4_3RrIpTsqmbRIwMcm-Bco,1800 +aiohttp/http_exceptions.py,sha256=eACQt7azwNbUi8aqXB5J2tIcc_CB4_4y4mLM9SZd4tM,2586 +aiohttp/http_parser.py,sha256=_8ESr_Qo22D7tsLtmzJHK2vysqgbI06WWiGmPm5fjWc,33092 +aiohttp/http_websocket.py,sha256=X6kzIgu0-wRLU9yQxP4iH-Hv_36uVjTCOmF2HgpZLlk,25299 +aiohttp/http_writer.py,sha256=Lia-FlcFvzvr0riJHZrvKFuzLzf2drWchEQ8vew5CJ0,5952 +aiohttp/locks.py,sha256=wRYFo1U82LwBBdqwU24JEPaoTAlKaaJd2FtfDKhkTb4,1136 +aiohttp/log.py,sha256=BbNKx9e3VMIm0xYjZI0IcBBoS7wjdeIeSaiJE7-qK2g,325 +aiohttp/multipart.py,sha256=gmqFziP4ou8ZuoAOibOjoW7OJOsURzI5gkxoLPARQy8,32313 +aiohttp/payload.py,sha256=rachrZ66vC90f7swPXINaatGpPcCSVkiCCVKhrCFfuw,13634 +aiohttp/payload_streamer.py,sha256=3WmZ77SQ4dc8aKU6i2ZAb8L7FPdesCSoqRonSVOd_kE,2112 
+aiohttp/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +aiohttp/pytest_plugin.py,sha256=4-5LfdrnZIBP2wLp8CjE54eshOolFbBpOTufvp4tUcI,11772 +aiohttp/resolver.py,sha256=CASOnXp5oZh_1sCFWzFlD-5x3V49HAXbAJw-5RYjgms,5092 +aiohttp/streams.py,sha256=_OTvFQVA8-8GJ120Y95lH-hmLq1QaFNBFdaA49TECIc,20758 +aiohttp/tcp_helpers.py,sha256=BSadqVWaBpMFDRWnhaaR941N9MiDZ7bdTrxgCb0CW-M,961 +aiohttp/test_utils.py,sha256=MNQb4Zq6rKLLC3PABy5hIjONlsoxd-lc3OirNGHIJS4,21434 +aiohttp/tracing.py,sha256=gn_O9btTDB66nQ1wJWT3N2gEsO5kYFSIynnd8cp4YgA,15177 +aiohttp/typedefs.py,sha256=oLnG3fxcBEdu4kIzUi0Npcg5kjq01cRkK27VSgjNnmw,1766 +aiohttp/web.py,sha256=bAhkE0oeP6eI06ohkBoPu4ZLchctpEz23cVym_JECSg,18081 +aiohttp/web_app.py,sha256=QkVmy8pR_oaJ3od2fYfSfjzfZ8oGEPg9FPW_d0r2THo,17170 +aiohttp/web_exceptions.py,sha256=T_ghTJB_hnPkoWa3qyeY_GtVbehYQwPCpcCRb-072N0,10098 +aiohttp/web_fileresponse.py,sha256=F_xRvYFL2ox4oVNW3WSjwQ6LmVpkkYM8MPxsqiNsfUg,10784 +aiohttp/web_log.py,sha256=OH-C5shv59-nXchWX8owfLfToMxVdtj0PuK3uGIGEJQ,7557 +aiohttp/web_middlewares.py,sha256=nnllFgxX9GjvkG3YW43jPDH7C03iCfyMBxhYw-OkTI0,4137 +aiohttp/web_protocol.py,sha256=EFr0sy29rW7ffRz-tlRlBnHogmlyt6YvaJP6X1Sg3i8,22399 +aiohttp/web_request.py,sha256=t0RvF_yOJG6uq9-nZjmwXjfqc9Mvgpc3sXUxC67uGvw,28187 +aiohttp/web_response.py,sha256=7WTmyeXY2DrWAhr9HWuxY1SecgxiO_QwRql86PSUS-U,27471 +aiohttp/web_routedef.py,sha256=EFk3v1dcFnLimTT5z0JSBO3PShF0w9sIzfK9iJd-LNs,6152 +aiohttp/web_runner.py,sha256=EzxH4v1lntydU3W-c6iLgDu5LI7kAyD7sAmkz6W5-9g,11157 +aiohttp/web_server.py,sha256=EA1YUxv_4szhpaED1O_VVCUFHNhPUJhl2Cq7W1BK72s,2050 +aiohttp/web_urldispatcher.py,sha256=IAvlOBqCPLjasMnYtCwSqbhj-j1JTQRRHFnNvMFd4d4,39483 +aiohttp/web_ws.py,sha256=8Qmp_zH-F7t9V4NLRFwfoTBr4oWuo3cZrnYT-i-zBI0,17144 +aiohttp/worker.py,sha256=Cbx1KyVligvWa6kje0hSXhdjgJ1AORLN1qu8qJ0LzSQ,8763 diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/WHEEL new file mode 120000 index 0000000..8b6429d --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fa/28/c6/0ee00b94fc66dc76e12a7b69b344331ec8be0cf957aa07ab6134c99051 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/direct_url.json new file mode 100644 index 0000000..95a7fe2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, "url": "https://files.pythonhosted.org/packages/af/d6/248ad502c6049011e7851e1474dd0a58175895388bed15f7f67dcb9187d9/aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/top_level.txt new file mode 120000 index 0000000..f24d800 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp-3.8.3.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8a/ff/89/21a69c99397e852868dd09a985c2a76d1458c75b2de3bca3cffd7bf11a \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/aiohttp/.hash/_cparser.pxd.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_cparser.pxd.hash new file mode 120000 index 0000000..7d05d43 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_cparser.pxd.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/81/72/f8a02bb4edee9e13b922ea2c32773ec30731ef2a1f559a768098bf9ff6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/_find_header.pxd.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_find_header.pxd.hash new file mode 120000 index 0000000..c923075 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_find_header.pxd.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fe/66/e9/0fabccf825422aadee954bec380cf959da3cf30ac3cdfa4eb0c41a30d0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyi.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyi.hash new file mode 120000 index 0000000..6ba80e4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyi.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/13/0e/01/6437362ea170b338196541c7ac9bf0e4ff0706127bd349f536d839da11 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyx.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyx.hash new file mode 120000 index 0000000..a1c4567 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyx.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e4/94/3a/0653011381e745a0867642bdff0a4bdd9496a54d60c8b62f6b45b0c767 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/_http_parser.pyx.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_http_parser.pyx.hash new file mode 100644 index 0000000..7a414c4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_http_parser.pyx.hash @@ -0,0 +1 @@ +d6edfcfc44b0e55812a9a8f1d669c674ee87aa4d28bfd3f1785ade1eae24b683 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/_http_writer.pyx.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_http_writer.pyx.hash new file mode 120000 index 0000000..a62a05f --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_http_writer.pyx.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/dd/08/37/4f70fe51def77a5ccf1c1b9f2b4c84bbdb4fe63b2eea0f9a3ca418f601 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/_websocket.pyx.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/_websocket.pyx.hash new file mode 120000 index 0000000..cfb0f2b --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/_websocket.pyx.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/33/de/df/f98b62fb8be713818d4c3d6cfc3ccab3e7c6ff0c378e57ba114be2c671 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/.hash/hdrs.py.hash b/venv/lib/python3.8/site-packages/aiohttp/.hash/hdrs.py.hash new file mode 120000 index 0000000..f8dccee --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/.hash/hdrs.py.hash @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2a/94/da/0d37167e242b5b640f0398258087f0ea8e490b586c01864778532e0672 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/__init__.py b/venv/lib/python3.8/site-packages/aiohttp/__init__.py new file mode 100644 index 0000000..b407fec --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/aiohttp/__init__.py @@ -0,0 +1,216 @@ +__version__ = "3.8.3" + +from typing import Tuple + +from . import hdrs as hdrs +from .client import ( + BaseConnector as BaseConnector, + ClientConnectionError as ClientConnectionError, + ClientConnectorCertificateError as ClientConnectorCertificateError, + ClientConnectorError as ClientConnectorError, + ClientConnectorSSLError as ClientConnectorSSLError, + ClientError as ClientError, + ClientHttpProxyError as ClientHttpProxyError, + ClientOSError as ClientOSError, + ClientPayloadError as ClientPayloadError, + ClientProxyConnectionError as ClientProxyConnectionError, + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + ClientResponseError as ClientResponseError, + ClientSession as ClientSession, + ClientSSLError as ClientSSLError, + ClientTimeout as ClientTimeout, + ClientWebSocketResponse as ClientWebSocketResponse, + ContentTypeError as ContentTypeError, + Fingerprint as Fingerprint, + InvalidURL as InvalidURL, + NamedPipeConnector as NamedPipeConnector, + RequestInfo as RequestInfo, + ServerConnectionError as ServerConnectionError, + ServerDisconnectedError as ServerDisconnectedError, + ServerFingerprintMismatch as ServerFingerprintMismatch, + ServerTimeoutError as ServerTimeoutError, + TCPConnector as TCPConnector, + TooManyRedirects as TooManyRedirects, + UnixConnector as UnixConnector, + WSServerHandshakeError as WSServerHandshakeError, + request as request, +) +from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar +from .formdata import FormData as FormData +from .helpers import BasicAuth, ChainMapProxy, ETag +from .http import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + WebSocketError as WebSocketError, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, +) +from .multipart import ( + BadContentDispositionHeader as BadContentDispositionHeader, + BadContentDispositionParam as BadContentDispositionParam, + BodyPartReader as BodyPartReader, + MultipartReader as MultipartReader, + MultipartWriter as MultipartWriter, + content_disposition_filename as content_disposition_filename, + parse_content_disposition as parse_content_disposition, +) +from .payload import ( + PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, + AsyncIterablePayload as AsyncIterablePayload, + BufferedReaderPayload as BufferedReaderPayload, + BytesIOPayload as BytesIOPayload, + BytesPayload as BytesPayload, + IOBasePayload as IOBasePayload, + JsonPayload as JsonPayload, + Payload as Payload, + StringIOPayload as StringIOPayload, + StringPayload as StringPayload, + TextIOPayload as TextIOPayload, + get_payload as get_payload, + payload_type as payload_type, +) +from .payload_streamer import streamer as streamer +from .resolver import ( + AsyncResolver as AsyncResolver, + DefaultResolver as DefaultResolver, + ThreadedResolver as ThreadedResolver, +) +from .streams import ( + EMPTY_PAYLOAD as EMPTY_PAYLOAD, + DataQueue as DataQueue, + EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, + StreamReader as StreamReader, +) +from .tracing import ( + TraceConfig as TraceConfig, + TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, + TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, + TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, + TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, + TraceConnectionReuseconnParams as 
TraceConnectionReuseconnParams, + TraceDnsCacheHitParams as TraceDnsCacheHitParams, + TraceDnsCacheMissParams as TraceDnsCacheMissParams, + TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, + TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, + TraceRequestChunkSentParams as TraceRequestChunkSentParams, + TraceRequestEndParams as TraceRequestEndParams, + TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestRedirectParams as TraceRequestRedirectParams, + TraceRequestStartParams as TraceRequestStartParams, + TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, +) + +__all__: Tuple[str, ...] = ( + "hdrs", + # client + "BaseConnector", + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponse", + "ClientRequest", + "ClientResponseError", + "ClientSSLError", + "ClientSession", + "ClientTimeout", + "ClientWebSocketResponse", + "ContentTypeError", + "Fingerprint", + "InvalidURL", + "RequestInfo", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "TCPConnector", + "TooManyRedirects", + "UnixConnector", + "NamedPipeConnector", + "WSServerHandshakeError", + "request", + # cookiejar + "CookieJar", + "DummyCookieJar", + # formdata + "FormData", + # helpers + "BasicAuth", + "ChainMapProxy", + "ETag", + # http + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + "WSMsgType", + "WSCloseCode", + "WSMessage", + "WebSocketError", + # multipart + "BadContentDispositionHeader", + "BadContentDispositionParam", + "BodyPartReader", + "MultipartReader", + "MultipartWriter", + "content_disposition_filename", + "parse_content_disposition", + # payload + "AsyncIterablePayload", + "BufferedReaderPayload", + "BytesIOPayload", + "BytesPayload", + "IOBasePayload", + "JsonPayload", + "PAYLOAD_REGISTRY", + "Payload", + "StringIOPayload", + "StringPayload", + "TextIOPayload", + "get_payload", + "payload_type", + # payload_streamer + "streamer", + # resolver + "AsyncResolver", + "DefaultResolver", + "ThreadedResolver", + # streams + "DataQueue", + "EMPTY_PAYLOAD", + "EofStream", + "FlowControlDataQueue", + "StreamReader", + # tracing + "TraceConfig", + "TraceConnectionCreateEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionReuseconnParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceDnsResolveHostEndParams", + "TraceDnsResolveHostStartParams", + "TraceRequestChunkSentParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceRequestRedirectParams", + "TraceRequestStartParams", + "TraceResponseChunkReceivedParams", +) + +try: + from .worker import GunicornUVLoopWebWorker, GunicornWebWorker + + __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker") +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c82ea0d Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/abc.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/abc.cpython-38.pyc new file mode 100644 index 0000000..2ea7b22 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/abc.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/base_protocol.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/base_protocol.cpython-38.pyc new file mode 100644 index 0000000..e85d829 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/base_protocol.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client.cpython-38.pyc new file mode 100644 index 0000000..3f2e26e Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_exceptions.cpython-38.pyc new file mode 100644 index 0000000..33230d7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_proto.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_proto.cpython-38.pyc new file mode 100644 index 0000000..03481ce Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_proto.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_reqrep.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_reqrep.cpython-38.pyc new file mode 100644 index 0000000..e970a5d Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_reqrep.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_ws.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_ws.cpython-38.pyc new file mode 100644 index 0000000..4a0159b Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/client_ws.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/connector.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/connector.cpython-38.pyc new file mode 100644 index 0000000..7c22045 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/connector.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/cookiejar.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/cookiejar.cpython-38.pyc new file mode 100644 index 0000000..262a3f0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/cookiejar.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/formdata.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/formdata.cpython-38.pyc new file mode 100644 index 0000000..c0acff9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/formdata.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/hdrs.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/hdrs.cpython-38.pyc new file mode 100644 index 0000000..1596258 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/hdrs.cpython-38.pyc differ diff 
--git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/helpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/helpers.cpython-38.pyc new file mode 100644 index 0000000..f960058 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/helpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http.cpython-38.pyc new file mode 100644 index 0000000..0bfda46 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_exceptions.cpython-38.pyc new file mode 100644 index 0000000..352cc76 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_parser.cpython-38.pyc new file mode 100644 index 0000000..a83fc23 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_parser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_websocket.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_websocket.cpython-38.pyc new file mode 100644 index 0000000..424509f Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_websocket.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_writer.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_writer.cpython-38.pyc new file mode 100644 index 0000000..af03651 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/http_writer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/locks.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/locks.cpython-38.pyc new file mode 100644 index 0000000..1cc051e Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/locks.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/log.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/log.cpython-38.pyc new file mode 100644 index 0000000..5522b42 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/log.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/multipart.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/multipart.cpython-38.pyc new file mode 100644 index 0000000..3e7bb0f Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/multipart.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/payload.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/payload.cpython-38.pyc new file mode 100644 index 0000000..40f4aaf Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/payload.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/payload_streamer.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/payload_streamer.cpython-38.pyc new file mode 100644 index 0000000..2a0315f Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/payload_streamer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-38.pyc new file mode 100644 index 0000000..c982789 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/resolver.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/resolver.cpython-38.pyc new file mode 100644 index 0000000..034b721 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/resolver.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/streams.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/streams.cpython-38.pyc new file mode 100644 index 0000000..2faf31e Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/streams.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-38.pyc new file mode 100644 index 0000000..3d4b6be Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/test_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/test_utils.cpython-38.pyc new file mode 100644 index 0000000..9a54c72 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/test_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/tracing.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/tracing.cpython-38.pyc new file mode 100644 index 0000000..5e550d8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/tracing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/typedefs.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/typedefs.cpython-38.pyc new file mode 100644 index 0000000..23b7eb9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/typedefs.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web.cpython-38.pyc new file mode 100644 index 0000000..a2a0061 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_app.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_app.cpython-38.pyc new file mode 100644 index 0000000..ad131d2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_app.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_exceptions.cpython-38.pyc new file mode 100644 index 0000000..2d7a949 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-38.pyc new file mode 100644 index 0000000..555e298 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_log.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_log.cpython-38.pyc new file mode 100644 index 0000000..b15e041 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_log.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_middlewares.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_middlewares.cpython-38.pyc new file mode 100644 index 0000000..079c175 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_middlewares.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_protocol.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_protocol.cpython-38.pyc new file mode 100644 index 0000000..be2d1ea Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_protocol.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_request.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_request.cpython-38.pyc new file mode 100644 index 0000000..1cb54b3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_request.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_response.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_response.cpython-38.pyc new file mode 100644 index 0000000..1f66c93 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_response.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_routedef.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_routedef.cpython-38.pyc new file mode 100644 index 0000000..6718d62 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_routedef.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_runner.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_runner.cpython-38.pyc new file mode 100644 index 0000000..2b0472c Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_runner.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_server.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_server.cpython-38.pyc new file mode 100644 index 0000000..7a6214a Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_server.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-38.pyc new file mode 100644 index 0000000..ec25f1b Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_ws.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_ws.cpython-38.pyc new file mode 100644 index 0000000..f2df5b8 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/web_ws.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/__pycache__/worker.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/worker.cpython-38.pyc new file mode 100644 index 0000000..50eaad3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/__pycache__/worker.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/_cparser.pxd b/venv/lib/python3.8/site-packages/aiohttp/_cparser.pxd new file mode 120000 index 0000000..dbe26bd --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_cparser.pxd @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/d1/34/d56d5f516ab2b5c3b295d0d440a3bef911f4384d506204018895a1f833 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/_find_header.pxd b/venv/lib/python3.8/site-packages/aiohttp/_find_header.pxd new file mode 120000 index 0000000..43840a8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_find_header.pxd @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d0/67/f0/1423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/_headers.pxi b/venv/lib/python3.8/site-packages/aiohttp/_headers.pxi new file mode 120000 index 0000000..990edcf --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_headers.pxi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9f/bd/35/936f1d54f8f0467c798fa2e92612d37e3eddaaedaf26dedfd0eeb43b28 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..ebe4752 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/_helpers.pyi b/venv/lib/python3.8/site-packages/aiohttp/_helpers.pyi new file mode 120000 index 0000000..2387471 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_helpers.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/66/82/a2/2524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/_helpers.pyx b/venv/lib/python3.8/site-packages/aiohttp/_helpers.pyx new file mode 120000 index 0000000..2694576 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_helpers.pyx @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5d/e2/db/35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..c9f3930 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/_http_parser.pyx b/venv/lib/python3.8/site-packages/aiohttp/_http_parser.pyx new file mode 100644 index 0000000..bebd989 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_http_parser.pyx @@ -0,0 +1,832 @@ +#cython: language_level=3 +# +# Based on https://github.com/MagicStack/httptools +# +from __future__ import absolute_import, print_function + +from cpython cimport ( + Py_buffer, + PyBUF_SIMPLE, + PyBuffer_Release, + PyBytes_AsString, + PyBytes_AsStringAndSize, + 
PyObject_GetBuffer, +) +from cpython.mem cimport PyMem_Free, PyMem_Malloc +from libc.limits cimport ULLONG_MAX +from libc.string cimport memcpy + +from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy +from yarl import URL as _URL + +from aiohttp import hdrs + +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + PayloadEncodingError, + TransferEncodingError, +) +from .http_parser import DeflateBuffer as _DeflateBuffer +from .http_writer import ( + HttpVersion as _HttpVersion, + HttpVersion10 as _HttpVersion10, + HttpVersion11 as _HttpVersion11, +) +from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader + +cimport cython + +from aiohttp cimport _cparser as cparser + +include "_headers.pxi" + +from aiohttp cimport _find_header + +DEF DEFAULT_FREELIST_SIZE = 250 + +cdef extern from "Python.h": + int PyByteArray_Resize(object, Py_ssize_t) except -1 + Py_ssize_t PyByteArray_Size(object) except -1 + char* PyByteArray_AsString(object) + +__all__ = ('HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage') + +cdef object URL = _URL +cdef object URL_build = URL.build +cdef object CIMultiDict = _CIMultiDict +cdef object CIMultiDictProxy = _CIMultiDictProxy +cdef object HttpVersion = _HttpVersion +cdef object HttpVersion10 = _HttpVersion10 +cdef object HttpVersion11 = _HttpVersion11 +cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 +cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING +cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD +cdef object StreamReader = _StreamReader +cdef object DeflateBuffer = _DeflateBuffer + + +cdef inline object extend(object buf, const char* at, size_t length): + cdef Py_ssize_t s + cdef char* ptr + s = PyByteArray_Size(buf) + PyByteArray_Resize(buf, s + length) + ptr = PyByteArray_AsString(buf) + memcpy(ptr + s, at, length) + + +DEF METHODS_COUNT = 46; + +cdef list _http_method = [] + +for i in range(METHODS_COUNT): + _http_method.append( + cparser.llhttp_method_name( i).decode('ascii')) + + +cdef inline str http_method_str(int i): + if i < METHODS_COUNT: + return _http_method[i] + else: + return "" + +cdef inline object find_header(bytes raw_header): + cdef Py_ssize_t size + cdef char *buf + cdef int idx + PyBytes_AsStringAndSize(raw_header, &buf, &size) + idx = _find_header.find_header(buf, size) + if idx == -1: + return raw_header.decode('utf-8', 'surrogateescape') + return headers[idx] + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawRequestMessage: + cdef readonly str method + cdef readonly str path + cdef readonly object version # HttpVersion + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + cdef readonly object url # yarl.URL + + def __init__(self, method, path, version, headers, raw_headers, + should_close, compression, upgrade, chunked, url): + self.method = method + self.path = path + self.version = version + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + self.url = url + + def __repr__(self): + info = [] + info.append(("method", self.method)) + info.append(("path", self.path)) + info.append(("version", self.version)) + info.append(("headers", self.headers)) + 
info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + info.append(("url", self.url)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '' + + def _replace(self, **dct): + cdef RawRequestMessage ret + ret = _new_request_message(self.method, + self.path, + self.version, + self.headers, + self.raw_headers, + self.should_close, + self.compression, + self.upgrade, + self.chunked, + self.url) + if "method" in dct: + ret.method = dct["method"] + if "path" in dct: + ret.path = dct["path"] + if "version" in dct: + ret.version = dct["version"] + if "headers" in dct: + ret.headers = dct["headers"] + if "raw_headers" in dct: + ret.raw_headers = dct["raw_headers"] + if "should_close" in dct: + ret.should_close = dct["should_close"] + if "compression" in dct: + ret.compression = dct["compression"] + if "upgrade" in dct: + ret.upgrade = dct["upgrade"] + if "chunked" in dct: + ret.chunked = dct["chunked"] + if "url" in dct: + ret.url = dct["url"] + return ret + +cdef _new_request_message(str method, + str path, + object version, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked, + object url): + cdef RawRequestMessage ret + ret = RawRequestMessage.__new__(RawRequestMessage) + ret.method = method + ret.path = path + ret.version = version + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + ret.url = url + return ret + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawResponseMessage: + cdef readonly object version # HttpVersion + cdef readonly int code + cdef readonly str reason + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + + def __init__(self, version, code, reason, headers, raw_headers, + should_close, compression, upgrade, chunked): + self.version = version + self.code = code + self.reason = reason + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + + def __repr__(self): + info = [] + info.append(("version", self.version)) + info.append(("code", self.code)) + info.append(("reason", self.reason)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '' + + +cdef _new_response_message(object version, + int code, + str reason, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked): + cdef RawResponseMessage ret + ret = RawResponseMessage.__new__(RawResponseMessage) + ret.version = version + ret.code = code + ret.reason = reason + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + return ret + + +@cython.internal +cdef class HttpParser: + + cdef: + 
cparser.llhttp_t* _cparser + cparser.llhttp_settings_t* _csettings + + bytearray _raw_name + bytearray _raw_value + bint _has_value + + object _protocol + object _loop + object _timer + + size_t _max_line_size + size_t _max_field_size + size_t _max_headers + bint _response_with_body + bint _read_until_eof + + bint _started + object _url + bytearray _buf + str _path + str _reason + object _headers + list _raw_headers + bint _upgraded + list _messages + object _payload + bint _payload_error + object _payload_exception + object _last_error + bint _auto_decompress + int _limit + + str _content_encoding + + Py_buffer py_buf + + def __cinit__(self): + self._cparser = \ + PyMem_Malloc(sizeof(cparser.llhttp_t)) + if self._cparser is NULL: + raise MemoryError() + + self._csettings = \ + PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + if self._csettings is NULL: + raise MemoryError() + + def __dealloc__(self): + PyMem_Free(self._cparser) + PyMem_Free(self._csettings) + + cdef _init( + self, cparser.llhttp_type mode, + object protocol, object loop, int limit, + object timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + cparser.llhttp_settings_init(self._csettings) + cparser.llhttp_init(self._cparser, mode, self._csettings) + self._cparser.data = self + self._cparser.content_length = 0 + + self._protocol = protocol + self._loop = loop + self._timer = timer + + self._buf = bytearray() + self._payload = None + self._payload_error = 0 + self._payload_exception = payload_exception + self._messages = [] + + self._raw_name = bytearray() + self._raw_value = bytearray() + self._has_value = False + + self._max_line_size = max_line_size + self._max_headers = max_headers + self._max_field_size = max_field_size + self._response_with_body = response_with_body + self._read_until_eof = read_until_eof + self._upgraded = False + self._auto_decompress = auto_decompress + self._content_encoding = None + + self._csettings.on_url = cb_on_url + self._csettings.on_status = cb_on_status + self._csettings.on_header_field = cb_on_header_field + self._csettings.on_header_value = cb_on_header_value + self._csettings.on_headers_complete = cb_on_headers_complete + self._csettings.on_body = cb_on_body + self._csettings.on_message_begin = cb_on_message_begin + self._csettings.on_message_complete = cb_on_message_complete + self._csettings.on_chunk_header = cb_on_chunk_header + self._csettings.on_chunk_complete = cb_on_chunk_complete + + self._last_error = None + self._limit = limit + + cdef _process_header(self): + if self._raw_name: + raw_name = bytes(self._raw_name) + raw_value = bytes(self._raw_value) + + name = find_header(raw_name) + value = raw_value.decode('utf-8', 'surrogateescape') + + self._headers.add(name, value) + + if name is CONTENT_ENCODING: + self._content_encoding = value + + PyByteArray_Resize(self._raw_name, 0) + PyByteArray_Resize(self._raw_value, 0) + self._has_value = False + self._raw_headers.append((raw_name, raw_value)) + + cdef _on_header_field(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + if self._has_value: + self._process_header() + + size = PyByteArray_Size(self._raw_name) + PyByteArray_Resize(self._raw_name, size + length) + buf = PyByteArray_AsString(self._raw_name) + memcpy(buf + size, at, length) + + cdef _on_header_value(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + + size = 
PyByteArray_Size(self._raw_value) + PyByteArray_Resize(self._raw_value, size + length) + buf = PyByteArray_AsString(self._raw_value) + memcpy(buf + size, at, length) + self._has_value = True + + cdef _on_headers_complete(self): + self._process_header() + + method = http_method_str(self._cparser.method) + should_close = not cparser.llhttp_should_keep_alive(self._cparser) + upgrade = self._cparser.upgrade + chunked = self._cparser.flags & cparser.F_CHUNKED + + raw_headers = tuple(self._raw_headers) + headers = CIMultiDictProxy(self._headers) + + if upgrade or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + encoding = None + enc = self._content_encoding + if enc is not None: + self._content_encoding = None + enc = enc.lower() + if enc in ('gzip', 'deflate', 'br'): + encoding = enc + + if self._cparser.type == cparser.HTTP_REQUEST: + msg = _new_request_message( + method, self._path, + self.http_version(), headers, raw_headers, + should_close, encoding, upgrade, chunked, self._url) + else: + msg = _new_response_message( + self.http_version(), self._cparser.status_code, self._reason, + headers, raw_headers, should_close, encoding, + upgrade, chunked) + + if ( + ULLONG_MAX > self._cparser.content_length > 0 or chunked or + self._cparser.method == cparser.HTTP_CONNECT or + (self._cparser.status_code >= 199 and + self._cparser.content_length == 0 and + self._read_until_eof) + ): + payload = StreamReader( + self._protocol, timer=self._timer, loop=self._loop, + limit=self._limit) + else: + payload = EMPTY_PAYLOAD + + self._payload = payload + if encoding is not None and self._auto_decompress: + self._payload = DeflateBuffer(payload, encoding) + + if not self._response_with_body: + payload = EMPTY_PAYLOAD + + self._messages.append((msg, payload)) + + cdef _on_message_complete(self): + self._payload.feed_eof() + self._payload = None + + cdef _on_chunk_header(self): + self._payload.begin_http_chunk_receiving() + + cdef _on_chunk_complete(self): + self._payload.end_http_chunk_receiving() + + cdef object _on_status_complete(self): + pass + + cdef inline http_version(self): + cdef cparser.llhttp_t* parser = self._cparser + + if parser.http_major == 1: + if parser.http_minor == 0: + return HttpVersion10 + elif parser.http_minor == 1: + return HttpVersion11 + + return HttpVersion(parser.http_major, parser.http_minor) + + ### Public API ### + + def feed_eof(self): + cdef bytes desc + + if self._payload is not None: + if self._cparser.flags & cparser.F_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header.") + elif self._cparser.flags & cparser.F_CONTENT_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header.") + elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: + desc = cparser.llhttp_get_error_reason(self._cparser) + raise PayloadEncodingError(desc.decode('latin-1')) + else: + self._payload.feed_eof() + elif self._started: + self._on_headers_complete() + if self._messages: + return self._messages[-1][0] + + def feed_data(self, data): + cdef: + size_t data_len + size_t nb + cdef cparser.llhttp_errno_t errno + + PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) + data_len = self.py_buf.len + + errno = cparser.llhttp_execute( + self._cparser, + self.py_buf.buf, + data_len) + + if errno is cparser.HPE_PAUSED_UPGRADE: + cparser.llhttp_resume_after_upgrade(self._cparser) + + nb = 
cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf + + PyBuffer_Release(&self.py_buf) + + if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): + if self._payload_error == 0: + if self._last_error is not None: + ex = self._last_error + self._last_error = None + else: + ex = parser_error_from_errno(self._cparser) + self._payload = None + raise ex + + if self._messages: + messages = self._messages + self._messages = [] + else: + messages = () + + if self._upgraded: + return messages, True, data[nb:] + else: + return messages, False, b'' + + def set_upgraded(self, val): + self._upgraded = val + + +cdef class HttpRequestParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + + cdef object _on_status_complete(self): + cdef int idx1, idx2 + if not self._buf: + return + self._path = self._buf.decode('utf-8', 'surrogateescape') + try: + idx3 = len(self._path) + if self._cparser.method == cparser.HTTP_CONNECT: + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + self._url = URL.build(authority=self._path, encoded=True) + elif idx3 > 1 and self._path[0] == '/': + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + idx1 = self._path.find("?") + if idx1 == -1: + query = "" + idx2 = self._path.find("#") + if idx2 == -1: + path = self._path + fragment = "" + else: + path = self._path[0: idx2] + fragment = self._path[idx2+1:] + + else: + path = self._path[0:idx1] + idx1 += 1 + idx2 = self._path.find("#", idx1+1) + if idx2 == -1: + query = self._path[idx1:] + fragment = "" + else: + query = self._path[idx1: idx2] + fragment = self._path[idx2+1:] + + self._url = URL.build( + path=path, + query_string=query, + fragment=fragment, + encoded=True, + ) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + self._url = URL(self._path, encoded=True) + finally: + PyByteArray_Resize(self._buf, 0) + + +cdef class HttpResponseParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True + ): + self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + + cdef object _on_status_complete(self): + if self._buf: + self._reason = self._buf.decode('utf-8', 'surrogateescape') + PyByteArray_Resize(self._buf, 0) + else: + self._reason = self._reason or '' + +cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + + pyparser._started = True + pyparser._headers = CIMultiDict() + pyparser._raw_headers = [] + PyByteArray_Resize(pyparser._buf, 0) + pyparser._path = None + pyparser._reason = None + return 0 + + +cdef int cb_on_url(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + try: + if length > pyparser._max_line_size: + raise 
LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_status(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef str reason + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_field(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + pyparser._on_status_complete() + size = len(pyparser._raw_name) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header name is too long', pyparser._max_field_size, size) + pyparser._on_header_field(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_value(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + size = len(pyparser._raw_value) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header value is too long', pyparser._max_field_size, size) + pyparser._on_header_value(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_status_complete() + pyparser._on_headers_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + if ( + pyparser._cparser.upgrade or + pyparser._cparser.method == cparser.HTTP_CONNECT + ): + return 2 + else: + return 0 + + +cdef int cb_on_body(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef bytes body = at[:length] + try: + pyparser._payload.feed_data(body, length) + except BaseException as exc: + if pyparser._payload_exception is not None: + pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + else: + pyparser._payload.set_exception(exc) + pyparser._payload_error = 1 + return -1 + else: + return 0 + + +cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._started = False + pyparser._on_message_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_header() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef parser_error_from_errno(cparser.llhttp_t* parser): + cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) + cdef bytes desc = cparser.llhttp_get_error_reason(parser) + + if errno in (cparser.HPE_CB_MESSAGE_BEGIN, + cparser.HPE_CB_HEADERS_COMPLETE, + cparser.HPE_CB_MESSAGE_COMPLETE, + cparser.HPE_CB_CHUNK_HEADER, + 
cparser.HPE_CB_CHUNK_COMPLETE, + cparser.HPE_INVALID_CONSTANT, + cparser.HPE_INVALID_HEADER_TOKEN, + cparser.HPE_INVALID_CONTENT_LENGTH, + cparser.HPE_INVALID_CHUNK_SIZE, + cparser.HPE_INVALID_EOF_STATE, + cparser.HPE_INVALID_TRANSFER_ENCODING): + cls = BadHttpMessage + + elif errno == cparser.HPE_INVALID_STATUS: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_METHOD: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_VERSION: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_URL: + cls = InvalidURLError + + else: + cls = BadHttpMessage + + return cls(desc.decode('latin-1')) diff --git a/venv/lib/python3.8/site-packages/aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..1721723 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/_http_writer.pyx b/venv/lib/python3.8/site-packages/aiohttp/_http_writer.pyx new file mode 120000 index 0000000..d804b12 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_http_writer.pyx @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/68/81/c0/a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..75ce7db Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/aiohttp/_websocket.pyx b/venv/lib/python3.8/site-packages/aiohttp/_websocket.pyx new file mode 120000 index 0000000..1c0c7fb --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/_websocket.pyx @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d5/7b/8e/48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/abc.py b/venv/lib/python3.8/site-packages/aiohttp/abc.py new file mode 100644 index 0000000..44a3bda --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/abc.py @@ -0,0 +1,207 @@ +import asyncio +import logging +from abc import ABC, abstractmethod +from collections.abc import Sized +from http.cookies import BaseCookie, Morsel +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, +) + +from multidict import CIMultiDict +from yarl import URL + +from .helpers import get_running_loop +from .typedefs import LooseCookies + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + from .web_exceptions import HTTPException + from .web_request import BaseRequest, Request + from .web_response import StreamResponse +else: + BaseRequest = Request = Application = StreamResponse = None + HTTPException = None + + +class AbstractRouter(ABC): + def __init__(self) -> None: + self._frozen = False + + def post_init(self, app: Application) -> None: + """Post init stage. + + Not an abstract method for sake of backward compatibility, + but if the router wants to be aware of the application + it can override this. 
+ """ + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + """Freeze router.""" + self._frozen = True + + @abstractmethod + async def resolve(self, request: Request) -> "AbstractMatchInfo": + """Return MATCH_INFO for given request""" + + +class AbstractMatchInfo(ABC): + @property # pragma: no branch + @abstractmethod + def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: + """Execute matched request handler""" + + @property + @abstractmethod + def expect_handler(self) -> Callable[[Request], Awaitable[None]]: + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self) -> Optional[HTTPException]: + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property # pragma: no branch + @abstractmethod + def apps(self) -> Tuple[Application, ...]: + """Stack of nested applications. + + Top level application is left-most element. + + """ + + @abstractmethod + def add_app(self, app: Application) -> None: + """Add application to the nested apps stack.""" + + @abstractmethod + def freeze(self) -> None: + """Freeze the match info. + + The method is called after route resolution. + + After the call .add_app() is forbidden. + + """ + + +class AbstractView(ABC): + """Abstract class based view.""" + + def __init__(self, request: Request) -> None: + self._request = request + + @property + def request(self) -> Request: + """Request instance.""" + return self._request + + @abstractmethod + def __await__(self) -> Generator[Any, None, StreamResponse]: + """Execute the view handler.""" + + +class AbstractResolver(ABC): + """Abstract DNS resolver.""" + + @abstractmethod + async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: + """Return IP address for given hostname""" + + @abstractmethod + async def close(self) -> None: + """Release resolver""" + + +if TYPE_CHECKING: # pragma: no cover + IterableBase = Iterable[Morsel[str]] +else: + IterableBase = Iterable + + +ClearCookiePredicate = Callable[["Morsel[str]"], bool] + + +class AbstractCookieJar(Sized, IterableBase): + """Abstract Cookie Jar.""" + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = get_running_loop(loop) + + @abstractmethod + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + """Clear all cookies if no predicate is passed.""" + + @abstractmethod + def clear_domain(self, domain: str) -> None: + """Clear all cookies for domain and all subdomains.""" + + @abstractmethod + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + """Return the jar's cookies filtered by their attributes.""" + + +class AbstractStreamWriter(ABC): + """Abstract stream writer.""" + + buffer_size = 0 + output_size = 0 + length: Optional[int] = 0 + + @abstractmethod + async def write(self, chunk: bytes) -> None: + """Write chunk into stream.""" + + @abstractmethod + async def write_eof(self, chunk: bytes = b"") -> None: + """Write last chunk.""" + + @abstractmethod + async def drain(self) -> None: + """Flush the write buffer.""" + + @abstractmethod + def enable_compression(self, encoding: str = "deflate") -> None: + """Enable HTTP body 
compression""" + + @abstractmethod + def enable_chunking(self) -> None: + """Enable HTTP chunked mode""" + + @abstractmethod + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write HTTP headers""" + + +class AbstractAccessLogger(ABC): + """Abstract writer to access log.""" + + def __init__(self, logger: logging.Logger, log_format: str) -> None: + self.logger = logger + self.log_format = log_format + + @abstractmethod + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + """Emit log to logger.""" diff --git a/venv/lib/python3.8/site-packages/aiohttp/base_protocol.py b/venv/lib/python3.8/site-packages/aiohttp/base_protocol.py new file mode 100644 index 0000000..8189835 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/base_protocol.py @@ -0,0 +1,87 @@ +import asyncio +from typing import Optional, cast + +from .tcp_helpers import tcp_nodelay + + +class BaseProtocol(asyncio.Protocol): + __slots__ = ( + "_loop", + "_paused", + "_drain_waiter", + "_connection_lost", + "_reading_paused", + "transport", + ) + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop: asyncio.AbstractEventLoop = loop + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + self._connection_lost = False + self._reading_paused = False + + self.transport: Optional[asyncio.Transport] = None + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def pause_reading(self) -> None: + if not self._reading_paused and self.transport is not None: + try: + self.transport.pause_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = True + + def resume_reading(self) -> None: + if self._reading_paused and self.transport is not None: + try: + self.transport.resume_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + tr = cast(asyncio.Transport, transport) + tcp_nodelay(tr, True) + self.transport = tr + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._connection_lost = True + # Wake up the writer if currently paused. 
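+ # Drop the transport reference first; any coroutine blocked in _drain_helper() is then woken below with exc (or None).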
+ self.transport = None + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + async def _drain_helper(self) -> None: + if self._connection_lost: + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + waiter = self._loop.create_future() + self._drain_waiter = waiter + await asyncio.shield(waiter) diff --git a/venv/lib/python3.8/site-packages/aiohttp/client.py b/venv/lib/python3.8/site-packages/aiohttp/client.py new file mode 100644 index 0000000..0d0f4c1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/client.py @@ -0,0 +1,1305 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import json +import os +import sys +import traceback +import warnings +from contextlib import suppress +from types import SimpleNamespace, TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Coroutine, + FrozenSet, + Generator, + Generic, + Iterable, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +import attr +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr +from yarl import URL + +from . import hdrs, http, payload +from .abc import AbstractCookieJar +from .client_exceptions import ( + ClientConnectionError as ClientConnectionError, + ClientConnectorCertificateError as ClientConnectorCertificateError, + ClientConnectorError as ClientConnectorError, + ClientConnectorSSLError as ClientConnectorSSLError, + ClientError as ClientError, + ClientHttpProxyError as ClientHttpProxyError, + ClientOSError as ClientOSError, + ClientPayloadError as ClientPayloadError, + ClientProxyConnectionError as ClientProxyConnectionError, + ClientResponseError as ClientResponseError, + ClientSSLError as ClientSSLError, + ContentTypeError as ContentTypeError, + InvalidURL as InvalidURL, + ServerConnectionError as ServerConnectionError, + ServerDisconnectedError as ServerDisconnectedError, + ServerFingerprintMismatch as ServerFingerprintMismatch, + ServerTimeoutError as ServerTimeoutError, + TooManyRedirects as TooManyRedirects, + WSServerHandshakeError as WSServerHandshakeError, +) +from .client_reqrep import ( + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + Fingerprint as Fingerprint, + RequestInfo as RequestInfo, + _merge_ssl_params, +) +from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse +from .connector import ( + BaseConnector as BaseConnector, + NamedPipeConnector as NamedPipeConnector, + TCPConnector as TCPConnector, + UnixConnector as UnixConnector, +) +from .cookiejar import CookieJar +from .helpers import ( + DEBUG, + PY_36, + BasicAuth, + TimeoutHandle, + ceil_timeout, + get_env_proxy_for_url, + get_running_loop, + sentinel, + strip_auth_from_url, +) +from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter +from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse +from .streams import FlowControlDataQueue +from .tracing import Trace, TraceConfig +from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL + +__all__ = ( + # client_exceptions + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + 
"ClientProxyConnectionError", + "ClientResponseError", + "ClientSSLError", + "ContentTypeError", + "InvalidURL", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "TooManyRedirects", + "WSServerHandshakeError", + # client_reqrep + "ClientRequest", + "ClientResponse", + "Fingerprint", + "RequestInfo", + # connector + "BaseConnector", + "TCPConnector", + "UnixConnector", + "NamedPipeConnector", + # client_ws + "ClientWebSocketResponse", + # client + "ClientSession", + "ClientTimeout", + "request", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = object # type: ignore[misc,assignment] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ClientTimeout: + total: Optional[float] = None + connect: Optional[float] = None + sock_read: Optional[float] = None + sock_connect: Optional[float] = None + + # pool_queue_timeout: Optional[float] = None + # dns_resolution_timeout: Optional[float] = None + # socket_connect_timeout: Optional[float] = None + # connection_acquiring_timeout: Optional[float] = None + # new_connection_timeout: Optional[float] = None + # http_header_timeout: Optional[float] = None + # response_body_timeout: Optional[float] = None + + # to create a timeout specific for a single request, either + # - create a completely new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + +# 5 Minute default read timeout +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) + +_RetType = TypeVar("_RetType") + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + ATTRS = frozenset( + [ + "_base_url", + "_source_traceback", + "_connector", + "requote_redirect_url", + "_loop", + "_cookie_jar", + "_connector_owner", + "_default_auth", + "_version", + "_json_serialize", + "_requote_redirect_url", + "_timeout", + "_raise_for_status", + "_auto_decompress", + "_trust_env", + "_default_headers", + "_skip_auto_headers", + "_request_class", + "_response_class", + "_ws_response_class", + "_trace_configs", + "_read_bufsize", + ] + ) + + _source_traceback = None # type: Optional[traceback.StackSummary] + _connector = None # type: Optional[BaseConnector] + + def __init__( + self, + base_url: Optional[StrOrURL] = None, + *, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + json_serialize: JSONEncoder = json.dumps, + request_class: Type[ClientRequest] = ClientRequest, + response_class: Type[ClientResponse] = ClientResponse, + ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, + version: HttpVersion = http.HttpVersion11, + cookie_jar: Optional[AbstractCookieJar] = None, + connector_owner: bool = True, + raise_for_status: bool = False, + read_timeout: Union[float, object] = sentinel, + conn_timeout: Optional[float] = None, + timeout: Union[object, ClientTimeout] = sentinel, + auto_decompress: bool = True, + trust_env: bool = False, + requote_redirect_url: bool = True, + trace_configs: Optional[List[TraceConfig]] = None, + read_bufsize: int = 2**16, + ) -> None: + if loop is None: + if connector is not None: + loop = connector._loop + + loop = get_running_loop(loop) + + if base_url is None or isinstance(base_url, URL): + self._base_url: 
Optional[URL] = base_url + else: + self._base_url = URL(base_url) + assert ( + self._base_url.origin() == self._base_url + ), "Only absolute URLs without path part are supported" + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize + if timeout is sentinel: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn( + "read_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + self._timeout = attr.evolve(self._timeout, total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, connect=conn_timeout) + warnings.warn( + "conn_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + else: + self._timeout = timeout # type: ignore[assignment] + if read_timeout is not sentinel: + raise ValueError( + "read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read" + ) + if conn_timeout is not None: + raise ValueError( + "conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect" + ) + self._raise_for_status = raise_for_status + self._auto_decompress = auto_decompress + self._trust_env = trust_env + self._requote_redirect_url = requote_redirect_url + self._read_bufsize = read_bufsize + + # Convert to list of tuples + if headers: + real_headers: CIMultiDict[str] = CIMultiDict(headers) + else: + real_headers = CIMultiDict() + self._default_headers: CIMultiDict[str] = real_headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + self._trace_configs = trace_configs or [] + for trace_config in self._trace_configs: + trace_config.freeze() + + def __init_subclass__(cls: Type["ClientSession"]) -> None: + warnings.warn( + "Inheritance class {} from ClientSession " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom ClientSession.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def __del__(self, _warnings: Any = warnings) -> None: + if not self.closed: + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn( + f"Unclosed client session {self!r}", ResourceWarning, **kwargs + ) + context = {"client_session": self, "message": "Unclosed client session"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return 
_RequestContextManager(self._request(method, url, **kwargs)) + + def _build_url(self, str_or_url: StrOrURL) -> URL: + url = URL(str_or_url) + if self._base_url is None: + return url + else: + assert not url.is_absolute() and url.path.startswith("/") + return self._base_url.join(url) + + async def _request( + self, + method: str, + str_or_url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None, + proxy_headers: Optional[LooseHeaders] = None, + trace_request_ctx: Optional[SimpleNamespace] = None, + read_bufsize: Optional[int] = None, + ) -> ClientResponse: + + # NOTE: timeout clamps existing connect and read timeouts. We cannot + # set the default to None because we need to detect if the user wants + # to use the existing timeouts by setting timeout to None. + + if self.closed: + raise RuntimeError("Session is closed") + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + if data is not None and json is not None: + raise ValueError( + "data and json parameters can not be used at the same time" + ) + elif json is not None: + data = payload.JsonPayload(json, dumps=self._json_serialize) + + if not isinstance(chunked, bool) and chunked is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + redirects = 0 + history = [] + version = self._version + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + proxy_headers = self._prepare_headers(proxy_headers) + + try: + url = self._build_url(str_or_url) + except ValueError as e: + raise InvalidURL(str_or_url) from e + + skip_headers = set(self._skip_auto_headers) + if skip_auto_headers is not None: + for i in skip_auto_headers: + skip_headers.add(istr(i)) + + if proxy is not None: + try: + proxy = URL(proxy) + except ValueError as e: + raise InvalidURL(proxy) from e + + if timeout is sentinel: + real_timeout: ClientTimeout = self._timeout + else: + if not isinstance(timeout, ClientTimeout): + real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type] + else: + real_timeout = timeout + # timeout is cumulative for all request operations + # (request, redirects, responses, data consuming) + tm = TimeoutHandle(self._loop, real_timeout.total) + handle = tm.start() + + if read_bufsize is None: + read_bufsize = self._read_bufsize + + traces = [ + Trace( + self, + trace_config, + trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), + ) + for trace_config in self._trace_configs + ] + + for trace in traces: + await trace.send_request_start(method, url.update_query(params), headers) + + timer = tm.timer() + try: + with timer: + while True: + url, auth_from_url = strip_auth_from_url(url) + if auth and auth_from_url: + raise ValueError( + "Cannot combine AUTH argument with " + "credentials 
encoded in URL" + ) + + if auth is None: + auth = auth_from_url + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit + # Authorization header with auth argument + if ( + headers is not None + and auth is not None + and hdrs.AUTHORIZATION in headers + ): + raise ValueError( + "Cannot combine AUTHORIZATION header " + "with AUTH argument or credentials " + "encoded in URL" + ) + + all_cookies = self._cookie_jar.filter_cookies(url) + + if cookies is not None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + if req_cookies: + all_cookies.load(req_cookies) + + if proxy is not None: + proxy = URL(proxy) + elif self._trust_env: + with suppress(LookupError): + proxy, proxy_auth = get_env_proxy_for_url(url) + + req = self._request_class( + method, + url, + params=params, + headers=headers, + skip_auto_headers=skip_headers, + data=data, + cookies=all_cookies, + auth=auth, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + loop=self._loop, + response_class=self._response_class, + proxy=proxy, + proxy_auth=proxy_auth, + timer=timer, + session=self, + ssl=ssl, + proxy_headers=proxy_headers, + traces=traces, + ) + + # connection timeout + try: + async with ceil_timeout(real_timeout.connect): + assert self._connector is not None + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ServerTimeoutError( + "Connection timeout " "to host {}".format(url) + ) from exc + + assert conn.transport is not None + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=method.upper() == "HEAD", + read_until_eof=read_until_eof, + auto_decompress=self._auto_decompress, + read_timeout=real_timeout.sock_read, + read_bufsize=read_bufsize, + ) + + try: + try: + resp = await req.send(conn) + try: + await resp.start(conn) + except BaseException: + resp.close() + raise + except BaseException: + conn.close() + raise + except ClientError: + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientOSError(*exc.args) from exc + + self._cookie_jar.update_cookies(resp.cookies, resp.url) + + # redirects + if resp.status in (301, 302, 303, 307, 308) and allow_redirects: + + for trace in traces: + await trace.send_request_redirect( + method, url.update_query(params), headers, resp + ) + + redirects += 1 + history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + raise TooManyRedirects( + history[0].request_info, tuple(history) + ) + + # For 301 and 302, mimic IE, now changed in RFC + # https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( + resp.status in (301, 302) and resp.method == hdrs.METH_POST + ): + method = hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( + hdrs.URI + ) + if r_url is None: + # see github.com/aio-libs/aiohttp/issues/2022 + break + else: + # reading from correct redirection + # response is forbidden + resp.release() + + try: + parsed_url = URL( + r_url, encoded=not self._requote_redirect_url + ) + + except ValueError as e: + raise InvalidURL(r_url) from e + + scheme = parsed_url.scheme + if scheme not in ("http", "https", ""): + resp.close() + raise ValueError("Can redirect 
only to http or https") + elif not scheme: + parsed_url = url.join(parsed_url) + + if url.origin() != parsed_url.origin(): + auth = None + headers.pop(hdrs.AUTHORIZATION, None) + + url = parsed_url + params = None + resp.release() + continue + + break + + # check response status + if raise_for_status is None: + raise_for_status = self._raise_for_status + if raise_for_status: + resp.raise_for_status() + + # register connection + if handle is not None: + if resp.connection is not None: + resp.connection.add_callback(handle.cancel) + else: + handle.cancel() + + resp._history = tuple(history) + + for trace in traces: + await trace.send_request_end( + method, url.update_query(params), headers, resp + ) + return resp + + except BaseException as e: + # cleanup timer + tm.close() + if handle: + handle.cancel() + handle = None + + for trace in traces: + await trace.send_request_exception( + method, url.update_query(params), headers, e + ) + raise + + def ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, None, Fingerprint] = None, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> "_WSRequestContextManager": + """Initiate websocket connection.""" + return _WSRequestContextManager( + self._ws_connect( + url, + method=method, + protocols=protocols, + timeout=timeout, + receive_timeout=receive_timeout, + autoclose=autoclose, + autoping=autoping, + heartbeat=heartbeat, + auth=auth, + origin=origin, + params=params, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + verify_ssl=verify_ssl, + fingerprint=fingerprint, + ssl_context=ssl_context, + proxy_headers=proxy_headers, + compress=compress, + max_msg_size=max_msg_size, + ) + ) + + async def _ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, None, Fingerprint] = None, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> ClientWebSocketResponse: + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + else: + real_headers = CIMultiDict(headers) + + default_headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_VERSION: "13", + } + + for key, value in default_headers.items(): + real_headers.setdefault(key, value) + + sec_key = base64.b64encode(os.urandom(16)) + 
real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() + + if protocols: + real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) + if origin is not None: + real_headers[hdrs.ORIGIN] = origin + if compress: + extstr = ws_ext_gen(compress=compress) + real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + # send request + resp = await self.request( + method, + url, + params=params, + headers=real_headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + proxy_headers=proxy_headers, + ) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid response status", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid upgrade header", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid connection header", + status=resp.status, + headers=resp.headers, + ) + + # key calculation + r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") + match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() + if r_key != match: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid challenge response", + status=resp.status, + headers=resp.headers, + ) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() + for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + # websocket compress + notakeover = False + if compress: + compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + if compress_hdrs: + try: + compress, notakeover = ws_ext_parse(compress_hdrs) + except WSHandshakeError as exc: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message=exc.args[0], + status=resp.status, + headers=resp.headers, + ) from exc + else: + compress = 0 + notakeover = False + + conn = resp.connection + assert conn is not None + conn_proto = conn.protocol + assert conn_proto is not None + transport = conn.transport + assert transport is not None + reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( + conn_proto, 2**16, loop=self._loop + ) + conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + writer = WebSocketWriter( + conn_proto, + transport, + use_mask=True, + compress=compress, + notakeover=notakeover, + ) + except BaseException: + resp.close() + raise + else: + return self._ws_response_class( + reader, + writer, + protocol, + resp, + timeout, + autoclose, + autoping, + self._loop, + receive_timeout=receive_timeout, + heartbeat=heartbeat, + compress=compress, + client_notakeover=notakeover, + ) + + def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": + """Add default headers and transform it to CIMultiDict""" + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names: Set[str] = set() + for key, value in headers.items(): + if key in added_names: + result.add(key, 
value) + else: + result[key] = value + added_names.add(key) + return result + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) + ) + + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) + + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) + + def patch( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) + + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) + + async def close(self) -> None: + """Close underlying connector. + + Release all acquired resources. + """ + if not self.closed: + if self._connector is not None and self._connector_owner: + await self._connector.close() + self._connector = None + + @property + def closed(self) -> bool: + """Is client session closed. + + A readonly property. 
+ """ + return self._connector is None or self._connector.closed + + @property + def connector(self) -> Optional[BaseConnector]: + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self) -> AbstractCookieJar: + """The session cookies.""" + return self._cookie_jar + + @property + def version(self) -> Tuple[int, int]: + """The session HTTP protocol version.""" + return self._version + + @property + def requote_redirect_url(self) -> bool: + """Do URL requoting on redirection handling.""" + return self._requote_redirect_url + + @requote_redirect_url.setter + def requote_redirect_url(self, val: bool) -> None: + """Do URL requoting on redirection handling.""" + warnings.warn( + "session.requote_redirect_url modification " "is deprecated #2778", + DeprecationWarning, + stacklevel=2, + ) + self._requote_redirect_url = val + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Session's loop.""" + warnings.warn( + "client.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def timeout(self) -> ClientTimeout: + """Timeout for the session.""" + return self._timeout + + @property + def headers(self) -> "CIMultiDict[str]": + """The default headers of the client session.""" + return self._default_headers + + @property + def skip_auto_headers(self) -> FrozenSet[istr]: + """Headers for which autogeneration should be skipped""" + return self._skip_auto_headers + + @property + def auth(self) -> Optional[BasicAuth]: + """An object that represents HTTP Basic Authorization""" + return self._default_auth + + @property + def json_serialize(self) -> JSONEncoder: + """Json serializer callable""" + return self._json_serialize + + @property + def connector_owner(self) -> bool: + """Should connector be closed on session closing""" + return self._connector_owner + + @property + def raise_for_status( + self, + ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: + """Should `ClientResponse.raise_for_status()` be called for each response.""" + return self._raise_for_status + + @property + def auto_decompress(self) -> bool: + """Should the body response be automatically decompressed.""" + return self._auto_decompress + + @property + def trust_env(self) -> bool: + """ + Should proxies information from environment or netrc be trusted. + + Information is from HTTP_PROXY / HTTPS_PROXY environment variables + or ~/.netrc file if present. + """ + return self._trust_env + + @property + def trace_configs(self) -> List[TraceConfig]: + """A list of TraceConfig instances used for client tracing""" + return self._trace_configs + + def detach(self) -> None: + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. 
+ """ + self._connector = None + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "ClientSession": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): + + __slots__ = ("_coro", "_resp") + + def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: + self._coro = coro + + def send(self, arg: None) -> "asyncio.Future[Any]": + return self._coro.send(arg) + + def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override] + self._coro.throw(arg) + + def close(self) -> None: + return self._coro.close() + + def __await__(self) -> Generator[Any, None, _RetType]: + ret = self._coro.__await__() + return ret + + def __iter__(self) -> Generator[Any, None, _RetType]: + return self.__await__() + + async def __aenter__(self) -> _RetType: + self._resp = await self._coro + return self._resp + + +class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # We're basing behavior on the exception as it can be caused by + # user code unrelated to the status of the connection. If you + # would like to close a connection you must do that + # explicitly. Otherwise connection error handling should kick in + # and close/recycle the connection as required. 
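+ # release() returns the connection to the pool for reuse; closing it must be done explicitly by the caller.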
+ self._resp.release() + + +class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self._resp.close() + + +class _SessionRequestContextManager: + + __slots__ = ("_coro", "_resp", "_session") + + def __init__( + self, + coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], + session: ClientSession, + ) -> None: + self._coro = coro + self._resp: Optional[ClientResponse] = None + self._session = session + + async def __aenter__(self) -> ClientResponse: + try: + self._resp = await self._coro + except BaseException: + await self._session.close() + raise + else: + return self._resp + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + assert self._resp is not None + self._resp.close() + await self._session.close() + + +def request( + method: str, + url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + cookies: Optional[LooseCookies] = None, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + read_bufsize: Optional[int] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> _SessionRequestContextManager: + """Constructs and sends a request. + + Returns response object. + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. 
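+ read_bufsize - (optional) Size of the buffer used for reading the response payload; when omitted the ClientSession default (64 KiB) is used.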
+ Usage:: + >>> import aiohttp + >>> resp = await aiohttp.request('GET', 'http://python.org/') + >>> resp + + >>> data = await resp.read() + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, + cookies=cookies, + version=version, + timeout=timeout, + connector=connector, + connector_owner=connector_owner, + ) + + return _SessionRequestContextManager( + session._request( + method, + url, + params=params, + data=data, + json=json, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + compress=compress, + chunked=chunked, + expect100=expect100, + raise_for_status=raise_for_status, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth, + read_bufsize=read_bufsize, + ), + session, + ) diff --git a/venv/lib/python3.8/site-packages/aiohttp/client_exceptions.py b/venv/lib/python3.8/site-packages/aiohttp/client_exceptions.py new file mode 100644 index 0000000..c640e1e --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/client_exceptions.py @@ -0,0 +1,342 @@ +"""HTTP related errors.""" + +import asyncio +import warnings +from typing import TYPE_CHECKING, Any, Optional, Tuple, Union + +from .http_parser import RawResponseMessage +from .typedefs import LooseHeaders + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore[assignment] + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo +else: + RequestInfo = ClientResponse = ConnectionKey = None + +__all__ = ( + "ClientError", + "ClientConnectionError", + "ClientOSError", + "ClientConnectorError", + "ClientProxyConnectionError", + "ClientSSLError", + "ClientConnectorSSLError", + "ClientConnectorCertificateError", + "ServerConnectionError", + "ServerTimeoutError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ClientResponseError", + "ClientHttpProxyError", + "WSServerHandshakeError", + "ContentTypeError", + "ClientPayloadError", + "InvalidURL", +) + + +class ClientError(Exception): + """Base class for client connection errors.""" + + +class ClientResponseError(ClientError): + """Connection error during reading response. 
+ + request_info: instance of RequestInfo + """ + + def __init__( + self, + request_info: RequestInfo, + history: Tuple[ClientResponse, ...], + *, + code: Optional[int] = None, + status: Optional[int] = None, + message: str = "", + headers: Optional[LooseHeaders] = None, + ) -> None: + self.request_info = request_info + if code is not None: + if status is not None: + raise ValueError( + "Both code and status arguments are provided; " + "code is deprecated, use status instead" + ) + warnings.warn( + "code argument is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + if status is not None: + self.status = status + elif code is not None: + self.status = code + else: + self.status = 0 + self.message = message + self.headers = headers + self.history = history + self.args = (request_info, history) + + def __str__(self) -> str: + return "{}, message={!r}, url={!r}".format( + self.status, + self.message, + self.request_info.real_url, + ) + + def __repr__(self) -> str: + args = f"{self.request_info!r}, {self.history!r}" + if self.status != 0: + args += f", status={self.status!r}" + if self.message != "": + args += f", message={self.message!r}" + if self.headers is not None: + args += f", headers={self.headers!r}" + return f"{type(self).__name__}({args})" + + @property + def code(self) -> int: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + return self.status + + @code.setter + def code(self, value: int) -> None: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + self.status = value + + +class ContentTypeError(ClientResponseError): + """ContentType found is not valid.""" + + +class WSServerHandshakeError(ClientResponseError): + """websocket server handshake error.""" + + +class ClientHttpProxyError(ClientResponseError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class TooManyRedirects(ClientResponseError): + """Client was redirected too many times.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientConnectorError(ClientOSError): + """Client connector error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + a connection can not be established. + """ + + def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: + self._conn_key = connection_key + self._os_error = os_error + super().__init__(os_error.errno, os_error.strerror) + self.args = (connection_key, os_error) + + @property + def os_error(self) -> OSError: + return self._os_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]: + return self._conn_key.ssl + + def __str__(self) -> str: + return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( + self, self.ssl if self.ssl is not None else "default", self.strerror + ) + + # OSError.__reduce__ does too much black magick + __reduce__ = BaseException.__reduce__ + + +class ClientProxyConnectionError(ClientConnectorError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. 
+ """ + + +class UnixClientConnectorError(ClientConnectorError): + """Unix connector error. + + Raised in :py:class:`aiohttp.connector.UnixConnector` + if connection to unix socket can not be established. + """ + + def __init__( + self, path: str, connection_key: ConnectionKey, os_error: OSError + ) -> None: + self._path = path + super().__init__(connection_key, os_error) + + @property + def path(self) -> str: + return self._path + + def __str__(self) -> str: + return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( + self, self.ssl if self.ssl is not None else "default", self.strerror + ) + + +class ServerConnectionError(ClientConnectionError): + """Server connection errors.""" + + +class ServerDisconnectedError(ServerConnectionError): + """Server disconnected.""" + + def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: + if message is None: + message = "Server disconnected" + + self.args = (message,) + self.message = message + + +class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): + """Server timeout error.""" + + +class ServerFingerprintMismatch(ServerConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: + self.expected = expected + self.got = got + self.host = host + self.port = port + self.args = (expected, got, host, port) + + def __repr__(self) -> str: + return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( + self.__class__.__name__, self.expected, self.got, self.host, self.port + ) + + +class ClientPayloadError(ClientError): + """Response payload error.""" + + +class InvalidURL(ClientError, ValueError): + """Invalid URL. + + URL used for fetching is malformed, e.g. it doesn't contains host + part. 
+ """ + + # Derive from ValueError for backward compatibility + + def __init__(self, url: Any) -> None: + # The type of url is not yarl.URL because the exception can be raised + # on URL(url) call + super().__init__(url) + + @property + def url(self) -> Any: + return self.args[0] + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.url}>" + + +class ClientSSLError(ClientConnectorError): + """Base error for ssl.*Errors.""" + + +if ssl is not None: + cert_errors = (ssl.CertificateError,) + cert_errors_bases = ( + ClientSSLError, + ssl.CertificateError, + ) + + ssl_errors = (ssl.SSLError,) + ssl_error_bases = (ClientSSLError, ssl.SSLError) +else: # pragma: no cover + cert_errors = tuple() + cert_errors_bases = ( + ClientSSLError, + ValueError, + ) + + ssl_errors = tuple() + ssl_error_bases = (ClientSSLError,) + + +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] + """Response ssl error.""" + + +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] + """Response certificate error.""" + + def __init__( + self, connection_key: ConnectionKey, certificate_error: Exception + ) -> None: + self._conn_key = connection_key + self._certificate_error = certificate_error + self.args = (connection_key, certificate_error) + + @property + def certificate_error(self) -> Exception: + return self._certificate_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> bool: + return self._conn_key.is_ssl + + def __str__(self) -> str: + return ( + "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " + "[{0.certificate_error.__class__.__name__}: " + "{0.certificate_error.args}]".format(self) + ) diff --git a/venv/lib/python3.8/site-packages/aiohttp/client_proto.py b/venv/lib/python3.8/site-packages/aiohttp/client_proto.py new file mode 100644 index 0000000..3041157 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/client_proto.py @@ -0,0 +1,251 @@ +import asyncio +from contextlib import suppress +from typing import Any, Optional, Tuple + +from .base_protocol import BaseProtocol +from .client_exceptions import ( + ClientOSError, + ClientPayloadError, + ServerDisconnectedError, + ServerTimeoutError, +) +from .helpers import BaseTimerContext +from .http import HttpResponseParser, RawResponseMessage +from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader + + +class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): + """Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + BaseProtocol.__init__(self, loop=loop) + DataQueue.__init__(self, loop) + + self._should_close = False + + self._payload: Optional[StreamReader] = None + self._skip_payload = False + self._payload_parser = None + + self._timer = None + + self._tail = b"" + self._upgraded = False + self._parser: Optional[HttpResponseParser] = None + + self._read_timeout: Optional[float] = None + self._read_timeout_handle: Optional[asyncio.TimerHandle] = None + + @property + def upgraded(self) -> bool: + return self._upgraded + + @property + def should_close(self) -> bool: + if self._payload is not None and not self._payload.is_eof() or self._upgraded: + return True + + return ( + self._should_close + or self._upgraded + or self.exception() is not None + or self._payload_parser is not None + or len(self) > 0 + or bool(self._tail) + ) + + def 
force_close(self) -> None: + self._should_close = True + + def close(self) -> None: + transport = self.transport + if transport is not None: + transport.close() + self.transport = None + self._payload = None + self._drop_timeout() + + def is_connected(self) -> bool: + return self.transport is not None and not self.transport.is_closing() + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._drop_timeout() + + if self._payload_parser is not None: + with suppress(Exception): + self._payload_parser.feed_eof() + + uncompleted = None + if self._parser is not None: + try: + uncompleted = self._parser.feed_eof() + except Exception: + if self._payload is not None: + self._payload.set_exception( + ClientPayloadError("Response payload is not completed") + ) + + if not self.is_eof(): + if isinstance(exc, OSError): + exc = ClientOSError(*exc.args) + if exc is None: + exc = ServerDisconnectedError(uncompleted) + # assigns self._should_close to True as side effect, + # we do it anyway below + self.set_exception(exc) + + self._should_close = True + self._parser = None + self._payload = None + self._payload_parser = None + self._reading_paused = False + + super().connection_lost(exc) + + def eof_received(self) -> None: + # should call parser.feed_eof() most likely + self._drop_timeout() + + def pause_reading(self) -> None: + super().pause_reading() + self._drop_timeout() + + def resume_reading(self) -> None: + super().resume_reading() + self._reschedule_timeout() + + def set_exception(self, exc: BaseException) -> None: + self._should_close = True + self._drop_timeout() + super().set_exception(exc) + + def set_parser(self, parser: Any, payload: Any) -> None: + # TODO: actual types are: + # parser: WebSocketReader + # payload: FlowControlDataQueue + # but they are not generi enough + # Need an ABC for both types + self._payload = payload + self._payload_parser = parser + + self._drop_timeout() + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def set_response_params( + self, + *, + timer: Optional[BaseTimerContext] = None, + skip_payload: bool = False, + read_until_eof: bool = False, + auto_decompress: bool = True, + read_timeout: Optional[float] = None, + read_bufsize: int = 2**16, + ) -> None: + self._skip_payload = skip_payload + + self._read_timeout = read_timeout + self._reschedule_timeout() + + self._parser = HttpResponseParser( + self, + self._loop, + read_bufsize, + timer=timer, + payload_exception=ClientPayloadError, + response_with_body=not skip_payload, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + ) + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def _drop_timeout(self) -> None: + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self) -> None: + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout + ) + else: + self._read_timeout_handle = None + + def _on_read_timeout(self) -> None: + exc = ServerTimeoutError("Timeout on reading data from socket") + self.set_exception(exc) + if self._payload is not None: + self._payload.set_exception(exc) + + def data_received(self, data: bytes) -> None: + self._reschedule_timeout() + + if not data: + return + + # custom payload parser + if self._payload_parser is not None: + eof, tail = 
self._payload_parser.feed_data(data) + if eof: + self._payload = None + self._payload_parser = None + + if tail: + self.data_received(tail) + return + else: + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + else: + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + self.set_exception(exc) + return + + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or message.code in (204, 304): + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if tail: + if upgraded: + self.data_received(tail) + else: + self._tail = tail diff --git a/venv/lib/python3.8/site-packages/aiohttp/client_reqrep.py b/venv/lib/python3.8/site-packages/aiohttp/client_reqrep.py new file mode 100644 index 0000000..28b8a28 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/client_reqrep.py @@ -0,0 +1,1134 @@ +import asyncio +import codecs +import functools +import io +import re +import sys +import traceback +import warnings +from hashlib import md5, sha1, sha256 +from http.cookies import CookieError, Morsel, SimpleCookie +from types import MappingProxyType, TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + List, + Mapping, + Optional, + Tuple, + Type, + Union, + cast, +) + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . 
import hdrs, helpers, http, multipart, payload +from .abc import AbstractStreamWriter +from .client_exceptions import ( + ClientConnectionError, + ClientOSError, + ClientResponseError, + ContentTypeError, + InvalidURL, + ServerFingerprintMismatch, +) +from .formdata import FormData +from .helpers import ( + PY_36, + BaseTimerContext, + BasicAuth, + HeadersMixin, + TimerNoop, + noop, + reify, + set_result, +) +from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter +from .log import client_logger +from .streams import StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseCookies, + LooseHeaders, + RawHeaders, +) + +try: + import ssl + from ssl import SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + +try: + import cchardet as chardet +except ImportError: # pragma: no cover + import charset_normalizer as chardet # type: ignore[no-redef] + + +__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession + from .connector import Connection + from .tracing import Trace + + +json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ContentDisposition: + type: Optional[str] + parameters: "MappingProxyType[str, str]" + filename: Optional[str] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class RequestInfo: + url: URL + method: str + headers: "CIMultiDictProxy[str]" + real_url: URL = attr.ib() + + @real_url.default + def real_url_default(self) -> URL: + return self.url + + +class Fingerprint: + HASHFUNC_BY_DIGESTLEN = { + 16: md5, + 20: sha1, + 32: sha256, + } + + def __init__(self, fingerprint: bytes) -> None: + digestlen = len(fingerprint) + hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) + if not hashfunc: + raise ValueError("fingerprint has invalid length") + elif hashfunc is md5 or hashfunc is sha1: + raise ValueError( + "md5 and sha1 are insecure and " "not supported. Use sha256." 
+ ) + self._hashfunc = hashfunc + self._fingerprint = fingerprint + + @property + def fingerprint(self) -> bytes: + return self._fingerprint + + def check(self, transport: asyncio.Transport) -> None: + if not transport.get_extra_info("sslcontext"): + return + sslobj = transport.get_extra_info("ssl_object") + cert = sslobj.getpeercert(binary_form=True) + got = self._hashfunc(cert).digest() + if got != self._fingerprint: + host, port, *_ = transport.get_extra_info("peername") + raise ServerFingerprintMismatch(self._fingerprint, got, host, port) + + +if ssl is not None: + SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) +else: # pragma: no cover + SSL_ALLOWED_TYPES = type(None) + + +def _merge_ssl_params( + ssl: Union["SSLContext", bool, Fingerprint, None], + verify_ssl: Optional[bool], + ssl_context: Optional["SSLContext"], + fingerprint: Optional[bytes], +) -> Union["SSLContext", bool, Fingerprint, None]: + if verify_ssl is not None and not verify_ssl: + warnings.warn( + "verify_ssl is deprecated, use ssl=False instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = False + if ssl_context is not None: + warnings.warn( + "ssl_context is deprecated, use ssl=context instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = ssl_context + if fingerprint is not None: + warnings.warn( + "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = Fingerprint(fingerprint) + if not isinstance(ssl, SSL_ALLOWED_TYPES): + raise TypeError( + "ssl should be SSLContext, bool, Fingerprint or None, " + "got {!r} instead.".format(ssl) + ) + return ssl + + +@attr.s(auto_attribs=True, slots=True, frozen=True) +class ConnectionKey: + # the key should contain an information about used proxy / TLS + # to prevent reusing wrong connections from a pool + host: str + port: Optional[int] + is_ssl: bool + ssl: Union[SSLContext, None, bool, Fingerprint] + proxy: Optional[URL] + proxy_auth: Optional[BasicAuth] + proxy_headers_hash: Optional[int] # hash(CIMultiDict) + + +def _is_expected_content_type( + response_content_type: str, expected_content_type: str +) -> bool: + if expected_content_type == "application/json": + return json_re.match(response_content_type) is not None + return expected_content_type in response_content_type + + +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + + DEFAULT_HEADERS = { + hdrs.ACCEPT: "*/*", + hdrs.ACCEPT_ENCODING: "gzip, deflate", + } + + body = b"" + auth = None + response = None + + _writer = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response + + # N.B. + # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. + # Until writer has finished finalizer will not be called. 
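+    # Instead, the writer task is awaited in ClientRequest.close() or
+    # cancelled in ClientRequest.terminate() (see below).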
+ + def __init__( + self, + method: str, + url: URL, + *, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Iterable[str] = frozenset(), + data: Any = None, + cookies: Optional[LooseCookies] = None, + auth: Optional[BasicAuth] = None, + version: http.HttpVersion = http.HttpVersion11, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + response_class: Optional[Type["ClientResponse"]] = None, + proxy: Optional[URL] = None, + proxy_auth: Optional[BasicAuth] = None, + timer: Optional[BaseTimerContext] = None, + session: Optional["ClientSession"] = None, + ssl: Union[SSLContext, bool, Fingerprint, None] = None, + proxy_headers: Optional[LooseHeaders] = None, + traces: Optional[List["Trace"]] = None, + ): + + if loop is None: + loop = asyncio.get_event_loop() + + assert isinstance(url, URL), url + assert isinstance(proxy, (URL, type(None))), proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + self._session = cast("ClientSession", session) + if params: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) + self.original_url = url + self.url = url.with_fragment(None) + self.method = method.upper() + self.chunked = chunked + self.compress = compress + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse + else: + real_response_class = response_class + self.response_class: Type[ClientResponse] = real_response_class + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth) + self.update_proxy(proxy, proxy_auth, proxy_headers) + + self.update_body_from_data(data) + if data is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + if traces is None: + traces = [] + self._traces = traces + + def is_ssl(self) -> bool: + return self.url.scheme in ("https", "wss") + + @property + def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]: + return self._ssl + + @property + def connection_key(self) -> ConnectionKey: + proxy_headers = self.proxy_headers + if proxy_headers: + h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + else: + h = None + return ConnectionKey( + self.host, + self.port, + self.is_ssl(), + self.ssl, + self.proxy, + self.proxy_auth, + h, + ) + + @property + def host(self) -> str: + ret = self.url.raw_host + assert ret is not None + return ret + + @property + def port(self) -> Optional[int]: + return self.url.port + + @property + def request_info(self) -> RequestInfo: + headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) + return RequestInfo(self.url, self.method, headers, self.original_url) + + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.raw_host: + raise InvalidURL(url) + + # basic auth info + username, password = url.user, url.password + if username: + self.auth = helpers.BasicAuth(username, password or "") + + def update_version(self, version: 
Union[http.HttpVersion, str]) -> None: + """Convert request version to two elements tuple. + + parser HTTP version '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [part.strip() for part in version.split(".", 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + f"Can not parse http version number: {version}" + ) from None + self.version = version + + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers: CIMultiDict[str] = CIMultiDict() + + # add host + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = f"[{netloc}]" + if self.url.port is not None and not self.url.is_default_port(): + netloc += ":" + str(self.url.port) + self.headers[hdrs.HOST] = netloc + + if headers: + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() # type: ignore[assignment] + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers.add(hdr, val) + + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return + + c: SimpleCookie[str] = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, "")) + del self.headers[hdrs.COOKIE] + + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore[assignment] + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + mrsl_val.set(value.key, value.value, value.coded_value) + c[name] = mrsl_val + else: + c[name] = value # type: ignore[assignment] + + self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() + + def update_content_encoding(self, data: Any) -> None: + """Set request content encoding.""" + if data is None: + return + + enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() + if enc: + if self.compress: + raise ValueError( + "compress can not be set " "if Content-Encoding header is set" + ) + elif self.compress: + if not isinstance(self.compress, str): + self.compress = "deflate" + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length + + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + if "chunked" in te: + if self.chunked: + raise ValueError( + "chunked can not be set " + 'if "Transfer-Encoding: chunked" header is set' + ) + + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + "chunked can not be set " "if Content-Length header is set" + ) + + self.headers[hdrs.TRANSFER_ENCODING] = "chunked" + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_auth(self, auth: Optional[BasicAuth]) 
-> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None: + return + + if not isinstance(auth, helpers.BasicAuth): + raise TypeError("BasicAuth() tuple is required instead") + + self.headers[hdrs.AUTHORIZATION] = auth.encode() + + def update_body_from_data(self, body: Any) -> None: + if body is None: + return + + # FormData + if isinstance(body, FormData): + body = body() + + try: + body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) + except payload.LookupError: + body = FormData(body)() + + self.body = body + + # enable chunked encoding if needed + if not self.chunked: + if hdrs.CONTENT_LENGTH not in self.headers: + size = body.size + if size is None: + self.chunked = True + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + + # copy payload headers + assert body.headers + for (key, value) in body.headers.items(): + if key in self.headers: + continue + if key in self.skip_auto_headers: + continue + self.headers[key] = value + + def update_expect_continue(self, expect: bool = False) -> None: + if expect: + self.headers[hdrs.EXPECT] = "100-continue" + elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + expect = True + + if expect: + self._continue = self.loop.create_future() + + def update_proxy( + self, + proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders], + ) -> None: + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy = proxy + self.proxy_auth = proxy_auth + self.proxy_headers = proxy_headers + + def keep_alive(self) -> bool: + if self.version < HttpVersion10: + # keep alive not supported at all + return False + if self.version == HttpVersion10: + if self.headers.get(hdrs.CONNECTION) == "keep-alive": + return True + else: # no headers means we close for Http 1.0 + return False + elif self.headers.get(hdrs.CONNECTION) == "close": + return False + + return True + + async def write_bytes( + self, writer: AbstractStreamWriter, conn: "Connection" + ) -> None: + """Support coroutines that yields bytes objects.""" + # 100 response + if self._continue is not None: + await writer.drain() + await self._continue + + protocol = conn.protocol + assert protocol is not None + try: + if isinstance(self.body, payload.Payload): + await self.body.write(writer) + else: + if isinstance(self.body, (bytes, bytearray)): + self.body = (self.body,) # type: ignore[assignment] + + for chunk in self.body: + await writer.write(chunk) # type: ignore[arg-type] + + await writer.write_eof() + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + protocol.set_exception(exc) + else: + new_exc = ClientOSError( + exc.errno, "Can not write request body for %s" % self.url + ) + new_exc.__context__ = exc + new_exc.__cause__ = exc + protocol.set_exception(new_exc) + except asyncio.CancelledError as exc: + if not conn.closed: + protocol.set_exception(exc) + except Exception as exc: + protocol.set_exception(exc) + finally: + self._writer = None + + async def send(self, conn: "Connection") -> "ClientResponse": + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = 
f"[{connect_host}]" + path = f"{connect_host}:{self.url.port}" + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path + if self.url.raw_query_string: + path += "?" + self.url.raw_query_string + + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, + self.loop, + on_chunk_sent=functools.partial( + self._on_chunk_request_sent, self.method, self.url + ), + on_headers_sent=functools.partial( + self._on_headers_request_sent, self.method, self.url + ), + ) + + if self.compress: + writer.enable_compression(self.compress) + + if self.chunked is not None: + writer.enable_chunking() + + # set default content-type + if ( + self.method in self.POST_METHODS + and hdrs.CONTENT_TYPE not in self.skip_auto_headers + and hdrs.CONTENT_TYPE not in self.headers + ): + self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" + + # set the connection header + connection = self.headers.get(hdrs.CONNECTION) + if not connection: + if self.keep_alive(): + if self.version == HttpVersion10: + connection = "keep-alive" + else: + if self.version == HttpVersion11: + connection = "close" + + if connection is not None: + self.headers[hdrs.CONNECTION] = connection + + # status + headers + status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format( + self.method, path, self.version + ) + await writer.write_headers(status_line, self.headers) + + self._writer = self.loop.create_task(self.write_bytes(writer, conn)) + + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, + self.original_url, + writer=self._writer, + continue100=self._continue, + timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session, + ) + return self.response + + async def close(self) -> None: + if self._writer is not None: + try: + await self._writer + finally: + self._writer = None + + def terminate(self) -> None: + if self._writer is not None: + if not self.loop.is_closed(): + self._writer.cancel() + self._writer = None + + async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(method, url, chunk) + + async def _on_headers_request_sent( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + for trace in self._traces: + await trace.send_request_headers(method, url, headers) + + +class ClientResponse(HeadersMixin): + + # from the Status-Line of the response + version = None # HTTP-Version + status: int = None # type: ignore[assignment] # Status-Code + reason = None # Reason-Phrase + + content: StreamReader = None # type: ignore[assignment] # Payload stream + _headers: "CIMultiDictProxy[str]" = None # type: ignore[assignment] + _raw_headers: RawHeaders = None # type: ignore[assignment] # Response raw headers + + _connection = None # current connection + _source_traceback = None + # setted up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + + def __init__( + self, + method: str, + url: URL, + *, + writer: "asyncio.Task[None]", + continue100: Optional["asyncio.Future[bool]"], + timer: BaseTimerContext, + request_info: RequestInfo, + traces: List["Trace"], + loop: asyncio.AbstractEventLoop, + session: "ClientSession", + ) -> None: + assert isinstance(url, URL) + + self.method = method + 
self.cookies: SimpleCookie[str] = SimpleCookie() + + self._real_url = url + self._url = url.with_fragment(None) + self._body: Any = None + self._writer: Optional[asyncio.Task[None]] = writer + self._continue = continue100 # None by default + self._closed = True + self._history: Tuple[ClientResponse, ...] = () + self._request_info = request_info + self._timer = timer if timer is not None else TimerNoop() + self._cache: Dict[str, Any] = {} + self._traces = traces + self._loop = loop + # store a reference to session #1985 + self._session: Optional[ClientSession] = session + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + @reify + def url(self) -> URL: + return self._url + + @reify + def url_obj(self) -> URL: + warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) + return self._url + + @reify + def real_url(self) -> URL: + return self._real_url + + @reify + def host(self) -> str: + assert self._url.host is not None + return self._url.host + + @reify + def headers(self) -> "CIMultiDictProxy[str]": + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + return self._raw_headers + + @reify + def request_info(self) -> RequestInfo: + return self._request_info + + @reify + def content_disposition(self) -> Optional[ContentDisposition]: + raw = self._headers.get(hdrs.CONTENT_DISPOSITION) + if raw is None: + return None + disposition_type, params_dct = multipart.parse_content_disposition(raw) + params = MappingProxyType(params_dct) + filename = multipart.content_disposition_filename(params) + return ContentDisposition(disposition_type, params, filename) + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + + if self._connection is not None: + self._connection.release() + self._cleanup_writer() + + if self._loop.get_debug(): + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) + context = {"client_response": self, "message": "Unclosed response"} + if self._source_traceback: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __repr__(self) -> str: + out = io.StringIO() + ascii_encodable_url = str(self.url) + if self.reason: + ascii_encodable_reason = self.reason.encode( + "ascii", "backslashreplace" + ).decode("ascii") + else: + ascii_encodable_reason = self.reason + print( + "".format( + ascii_encodable_url, self.status, ascii_encodable_reason + ), + file=out, + ) + print(self.headers, file=out) + return out.getvalue() + + @property + def connection(self) -> Optional["Connection"]: + return self._connection + + @reify + def history(self) -> Tuple["ClientResponse", ...]: + """A sequence of of responses, if redirects occurred.""" + return self._history + + @reify + def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": + links_str = ", ".join(self.headers.getall("link", [])) + + if not links_str: + return MultiDictProxy(MultiDict()) + + links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() + + for val in re.split(r",(?=\s*<)", links_str): + match = re.match(r"\s*<(.*)>(.*)", val) + if match is None: # pragma: no cover + # the check exists to suppress mypy error + continue + url, params_str = match.groups() + params = params_str.split(";")[1:] + + link: MultiDict[Union[str, URL]] = MultiDict() + + for param in params: + match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) + if match is None: # pragma: 
no cover + # the check exists to suppress mypy error + continue + key, _, value, _ = match.groups() + + link.add(key, value) + + key = link.get("rel", url) # type: ignore[assignment] + + link.add("url", self.url.join(URL(url))) + + links.add(key, MultiDictProxy(link)) + + return MultiDictProxy(links) + + async def start(self, connection: "Connection") -> "ClientResponse": + """Start response processing.""" + self._closed = False + self._protocol = connection.protocol + self._connection = connection + + with self._timer: + while True: + # read response + try: + protocol = self._protocol + message, payload = await protocol.read() # type: ignore[union-attr] + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, + self.history, + status=exc.code, + message=exc.message, + headers=exc.headers, + ) from exc + + if message.code < 100 or message.code > 199 or message.code == 101: + break + + if self._continue is not None: + set_result(self._continue, True) + self._continue = None + + # payload eof handler + payload.on_eof(self._response_eof) + + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason + + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] + + # payload + self.content = payload + + # cookies + for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): + try: + self.cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + return self + + def _response_eof(self) -> None: + if self._closed: + return + + if self._connection is not None: + # websocket, protocol could be None because + # connection could be detached + if ( + self._connection.protocol is not None + and self._connection.protocol.upgraded + ): + return + + self._connection.release() + self._connection = None + + self._closed = True + self._cleanup_writer() + + @property + def closed(self) -> bool: + return self._closed + + def close(self) -> None: + if not self._released: + self._notify_content() + if self._closed: + return + + self._closed = True + if self._loop is None or self._loop.is_closed(): + return + + if self._connection is not None: + self._connection.close() + self._connection = None + self._cleanup_writer() + + def release(self) -> Any: + if not self._released: + self._notify_content() + if self._closed: + return noop() + + self._closed = True + if self._connection is not None: + self._connection.release() + self._connection = None + + self._cleanup_writer() + return noop() + + @property + def ok(self) -> bool: + """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + + This is **not** a check for ``200 OK`` but a check that the response + status is under 400. 
+ """ + return 400 > self.status + + def raise_for_status(self) -> None: + if not self.ok: + # reason should always be not None for a started response + assert self.reason is not None + self.release() + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers, + ) + + def _cleanup_writer(self) -> None: + if self._writer is not None: + self._writer.cancel() + self._writer = None + self._session = None + + def _notify_content(self) -> None: + content = self.content + if content and content.exception() is None: + content.set_exception(ClientConnectionError("Connection closed")) + self._released = True + + async def wait_for_close(self) -> None: + if self._writer is not None: + try: + await self._writer + finally: + self._writer = None + self.release() + + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received( + self.method, self.url, self._body + ) + except BaseException: + self.close() + raise + elif self._released: + raise ClientConnectionError("Connection closed") + + return self._body # type: ignore[no-any-return] + + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + mimetype = helpers.parse_mimetype(ctype) + + encoding = mimetype.parameters.get("charset") + if encoding: + try: + codecs.lookup(encoding) + except LookupError: + encoding = None + if not encoding: + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" + ): + # RFC 7159 states that the default encoding is UTF-8. + # RFC 7483 defines application/rdap+json + encoding = "utf-8" + elif self._body is None: + raise RuntimeError( + "Cannot guess the encoding of " "a not yet read body" + ) + else: + encoding = chardet.detect(self._body)["encoding"] + if not encoding: + encoding = "utf-8" + + return encoding + + async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + """Read response payload and decode.""" + if self._body is None: + await self.read() + + if encoding is None: + encoding = self.get_encoding() + + return self._body.decode( # type: ignore[no-any-return,union-attr] + encoding, errors=errors + ) + + async def json( + self, + *, + encoding: Optional[str] = None, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + content_type: Optional[str] = "application/json", + ) -> Any: + """Read and decodes JSON response.""" + if self._body is None: + await self.read() + + if content_type: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + if not _is_expected_content_type(ctype, content_type): + raise ContentTypeError( + self.request_info, + self.history, + message=( + "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype + ), + headers=self.headers, + ) + + stripped = self._body.strip() # type: ignore[union-attr] + if not stripped: + return None + + if encoding is None: + encoding = self.get_encoding() + + return loads(stripped.decode(encoding)) + + async def __aenter__(self) -> "ClientResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # similar to _RequestContextManager, we do not need to check + # for exceptions, response object can close connection + # if state is broken + self.release() diff --git 
a/venv/lib/python3.8/site-packages/aiohttp/client_ws.py b/venv/lib/python3.8/site-packages/aiohttp/client_ws.py new file mode 100644 index 0000000..9a8ba84 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/client_ws.py @@ -0,0 +1,300 @@ +"""WebSocket client for asyncio.""" + +import asyncio +from typing import Any, Optional, cast + +import async_timeout + +from .client_exceptions import ClientError +from .client_reqrep import ClientResponse +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSCloseCode, + WSMessage, + WSMsgType, +) +from .http_websocket import WebSocketWriter # WSMessage +from .streams import EofStream, FlowControlDataQueue +from .typedefs import ( + DEFAULT_JSON_DECODER, + DEFAULT_JSON_ENCODER, + JSONDecoder, + JSONEncoder, +) + + +class ClientWebSocketResponse: + def __init__( + self, + reader: "FlowControlDataQueue[WSMessage]", + writer: WebSocketWriter, + protocol: Optional[str], + response: ClientResponse, + timeout: float, + autoclose: bool, + autoping: bool, + loop: asyncio.AbstractEventLoop, + *, + receive_timeout: Optional[float] = None, + heartbeat: Optional[float] = None, + compress: int = 0, + client_notakeover: bool = False, + ) -> None: + self._response = response + self._conn = response.connection + + self._writer = writer + self._reader = reader + self._protocol = protocol + self._closed = False + self._closing = False + self._close_code: Optional[int] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._loop = loop + self._waiting: Optional[asyncio.Future[bool]] = None + self._exception: Optional[BaseException] = None + self._compress = compress + self._client_notakeover = client_notakeover + + self._reset_heartbeat() + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + self._heartbeat_cb = call_later( + self._send_heartbeat, self._heartbeat, self._loop + ) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
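+            # Send the ping, then (re)arm the timer that closes the connection
+            # if no pong arrives within half the heartbeat interval.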
+ self._loop.create_task(self._writer.ping()) + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, self._pong_heartbeat, self._loop + ) + + def _pong_not_received(self) -> None: + if not self._closed: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + self._response.close() + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def protocol(self) -> Optional[str]: + return self._protocol + + @property + def compress(self) -> int: + return self._compress + + @property + def client_notakeover(self) -> bool: + return self._client_notakeover + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """extra info from connection transport""" + conn = self._response.connection + if conn is None: + return default + transport = conn.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[int] = None) -> None: + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[int] = None, + *, + dumps: JSONEncoder = DEFAULT_JSON_ENCODER, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + self._reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._cancel_heartbeat() + self._closed = True + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if self._closing: + self._response.close() + return True + + while True: + try: + async with async_timeout.timeout(self._timeout): + msg = await self._reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + self._response.close() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + while True: + if self._waiting is not None: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + return 
WS_CLOSED_MESSAGE + elif self._closing: + await self.close() + return WS_CLOSED_MESSAGE + + try: + self._waiting = self._loop.create_future() + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + self._waiting = None + set_result(waiter, True) + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except ClientError: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + return WS_CLOSED_MESSAGE + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") + return cast(str, msg.data) + + async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, + *, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + timeout: Optional[float] = None, + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + def __aiter__(self) -> "ClientWebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg diff --git a/venv/lib/python3.8/site-packages/aiohttp/connector.py b/venv/lib/python3.8/site-packages/aiohttp/connector.py new file mode 100644 index 0000000..bf40689 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/connector.py @@ -0,0 +1,1453 @@ +import asyncio +import functools +import random +import sys +import traceback +import warnings +from collections import defaultdict, deque +from contextlib import suppress +from http.cookies import SimpleCookie +from itertools import cycle, islice +from time import monotonic +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + DefaultDict, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Type, + Union, + cast, +) + +import attr + +from . 
import hdrs, helpers +from .abc import AbstractResolver +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientHttpProxyError, + ClientProxyConnectionError, + ServerFingerprintMismatch, + UnixClientConnectorError, + cert_errors, + ssl_errors, +) +from .client_proto import ResponseHandler +from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params +from .helpers import ( + PY_36, + ceil_timeout, + get_running_loop, + is_ip_address, + noop, + sentinel, +) +from .http import RESPONSES +from .locks import EventResultOrError +from .resolver import DefaultResolver + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientTimeout + from .client_reqrep import ConnectionKey + from .tracing import Trace + + +class _DeprecationWaiter: + __slots__ = ("_awaitable", "_awaited") + + def __init__(self, awaitable: Awaitable[Any]) -> None: + self._awaitable = awaitable + self._awaited = False + + def __await__(self) -> Any: + self._awaited = True + return self._awaitable.__await__() + + def __del__(self) -> None: + if not self._awaited: + warnings.warn( + "Connector.close() is a coroutine, " + "please use await connector.close()", + DeprecationWarning, + ) + + +class Connection: + + _source_traceback = None + _transport = None + + def __init__( + self, + connector: "BaseConnector", + key: "ConnectionKey", + protocol: ResponseHandler, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._key = key + self._connector = connector + self._loop = loop + self._protocol: Optional[ResponseHandler] = protocol + self._callbacks: List[Callable[[], None]] = [] + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self) -> str: + return f"Connection<{self._key}>" + + def __del__(self, _warnings: Any = warnings) -> None: + if self._protocol is not None: + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) + if self._loop.is_closed(): + return + + self._connector._release(self._key, self._protocol, should_close=True) + + context = {"client_connection": self, "message": "Unclosed connection"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + @property + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def protocol(self) -> Optional[ResponseHandler]: + return self._protocol + + def add_callback(self, callback: Callable[[], None]) -> None: + if callback is not None: + self._callbacks.append(callback) + + def _notify_release(self) -> None: + callbacks, self._callbacks = self._callbacks[:], [] + + for cb in callbacks: + with suppress(Exception): + cb() + + def close(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release(self._key, self._protocol, should_close=True) + self._protocol = 
None + + def release(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release( + self._key, self._protocol, should_close=self._protocol.should_close + ) + self._protocol = None + + @property + def closed(self) -> bool: + return self._protocol is None or not self._protocol.is_connected() + + +class _TransportPlaceholder: + """placeholder for BaseConnector.connect function""" + + def close(self) -> None: + pass + + +class BaseConnector: + """Base connector class. + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. + """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + # abort transport after 2 seconds (cleanup broken connections) + _cleanup_closed_period = 2.0 + + def __init__( + self, + *, + keepalive_timeout: Union[object, None, float] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + + if force_close: + if keepalive_timeout is not None and keepalive_timeout is not sentinel: + raise ValueError( + "keepalive_timeout cannot " "be set if force_close is True" + ) + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 15.0 + + loop = get_running_loop(loop) + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {} + self._limit = limit + self._limit_per_host = limit_per_host + self._acquired: Set[ResponseHandler] = set() + self._acquired_per_host: DefaultDict[ + ConnectionKey, Set[ResponseHandler] + ] = defaultdict(set) + self._keepalive_timeout = cast(float, keepalive_timeout) + self._force_close = force_close + + # {host_key: FIFO list of waiters} + self._waiters = defaultdict(deque) # type: ignore[var-annotated] + + self._loop = loop + self._factory = functools.partial(ResponseHandler, loop=loop) + + self.cookies: SimpleCookie[str] = SimpleCookie() + + # start keep-alive connection cleanup task + self._cleanup_handle: Optional[asyncio.TimerHandle] = None + + # start cleanup closed transports task + self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + self._cleanup_closed_disabled = not enable_cleanup_closed + self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] + self._cleanup_closed() + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self._close() + + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) + context = { + "connector": self, + "connections": conns, + "message": "Unclosed connector", + } + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __enter__(self) -> "BaseConnector": + warnings.warn( + '"with Connector():" is deprecated, ' + 'use "async with Connector():" instead', + DeprecationWarning, + ) + return self + + def 
__exit__(self, *exc: Any) -> None: + self._close() + + async def __aenter__(self) -> "BaseConnector": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + exc_traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + @property + def force_close(self) -> bool: + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self) -> int: + """The total number for simultaneous connections. + + If limit is 0 the connector has no limit. + The default limit size is 100. + """ + return self._limit + + @property + def limit_per_host(self) -> int: + """The limit for simultaneous connections to the same endpoint. + + Endpoints are the same if they are have equal + (host, port, is_ssl) triple. + """ + return self._limit_per_host + + def _cleanup(self) -> None: + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + # _cleanup_handle should be unset, otherwise _release() will not + # recreate it ever! + self._cleanup_handle = None + + now = self._loop.time() + timeout = self._keepalive_timeout + + if self._conns: + connections = {} + deadline = now - timeout + for key, conns in self._conns.items(): + alive = [] + for proto, use_time in conns: + if proto.is_connected(): + if use_time - deadline < 0: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + alive.append((proto, use_time)) + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + if alive: + connections[key] = alive + + self._conns = connections + + if self._conns: + self._cleanup_handle = helpers.weakref_handle( + self, "_cleanup", timeout, self._loop + ) + + def _drop_acquired_per_host( + self, key: "ConnectionKey", val: ResponseHandler + ) -> None: + acquired_per_host = self._acquired_per_host + if key not in acquired_per_host: + return + conns = acquired_per_host[key] + conns.remove(val) + if not conns: + del self._acquired_per_host[key] + + def _cleanup_closed(self) -> None: + """Double confirmation for transport close. + + Some broken ssl servers may leave socket open without proper close. 
+ """ + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + self._cleanup_closed_transports = [] + + if not self._cleanup_closed_disabled: + self._cleanup_closed_handle = helpers.weakref_handle( + self, "_cleanup_closed", self._cleanup_closed_period, self._loop + ) + + def close(self) -> Awaitable[None]: + """Close all opened transports.""" + self._close() + return _DeprecationWaiter(noop()) + + def _close(self) -> None: + if self._closed: + return + + self._closed = True + + try: + if self._loop.is_closed(): + return + + # cancel cleanup task + if self._cleanup_handle: + self._cleanup_handle.cancel() + + # cancel cleanup close task + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for data in self._conns.values(): + for proto, t0 in data: + proto.close() + + for proto in self._acquired: + proto.close() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + finally: + self._conns.clear() + self._acquired.clear() + self._waiters.clear() + self._cleanup_handle = None + self._cleanup_closed_transports.clear() + self._cleanup_closed_handle = None + + @property + def closed(self) -> bool: + """Is connector closed. + + A readonly property. + """ + return self._closed + + def _available_connections(self, key: "ConnectionKey") -> int: + """ + Return number of available connections. + + The limit, limit_per_host and the connection key are taken into account. + + If it returns less than 1 means that there are no connections + available. + """ + if self._limit: + # total calc available connections + available = self._limit - len(self._acquired) + + # check limit per host + if ( + self._limit_per_host + and available > 0 + and key in self._acquired_per_host + ): + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + + elif self._limit_per_host and key in self._acquired_per_host: + # check limit per host + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + else: + available = 1 + + return available + + async def connect( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> Connection: + """Get from pool or create new connection.""" + key = req.connection_key + available = self._available_connections(key) + + # Wait if there are no available connections or if there are/were + # waiters (i.e. don't steal connection from a waiter about to wake up) + if available <= 0 or key in self._waiters: + fut = self._loop.create_future() + + # This connection will now count towards the limit. 
+ self._waiters[key].append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + if key in self._waiters: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + try: + self._waiters[key].remove(fut) + except ValueError: # fut may no longer be in list + pass + + raise e + finally: + if key in self._waiters and not self._waiters[key]: + del self._waiters[key] + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + + proto = self._get(key) + if proto is None: + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_start() + + try: + proto = await self._create_connection(req, traces, timeout) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") + except BaseException: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + self._release_waiter() + raise + else: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_end() + else: + if traces: + # Acquire the connection to prevent race conditions with limits + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + for trace in traces: + await trace.send_connection_reuseconn() + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + return Connection(self, key, proto, self._loop) + + def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: + try: + conns = self._conns[key] + except KeyError: + return None + + t1 = self._loop.time() + while conns: + proto, t0 = conns.pop() + if proto.is_connected(): + if t1 - t0 > self._keepalive_timeout: + transport = proto.transport + proto.close() + # only for SSL transports + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return proto + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + # No more connections: drop the key + del self._conns[key] + return None + + def _release_waiter(self) -> None: + """ + Iterates over all waiters until one to be released is found. + + The one to be released is not finsihed and + belongs to a host that has available connections. + """ + if not self._waiters: + return + + # Having the dict keys ordered this avoids to iterate + # at the same order at each call. 
+ queues = list(self._waiters.keys()) + random.shuffle(queues) + + for key in queues: + if self._available_connections(key) < 1: + continue + + waiters = self._waiters[key] + while waiters: + waiter = waiters.popleft() + if not waiter.done(): + waiter.set_result(None) + return + + def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + try: + self._acquired.remove(proto) + self._drop_acquired_per_host(key, proto) + except KeyError: # pragma: no cover + # this may be result of undetermenistic order of objects + # finalization due garbage collection. + pass + else: + self._release_waiter() + + def _release( + self, + key: "ConnectionKey", + protocol: ResponseHandler, + *, + should_close: bool = False, + ) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + self._release_acquired(key, protocol) + + if self._force_close: + should_close = True + + if should_close or protocol.should_close: + transport = protocol.transport + protocol.close() + + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, "_cleanup", self._keepalive_timeout, self._loop + ) + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + raise NotImplementedError() + + +class _DNSCacheTable: + def __init__(self, ttl: Optional[float] = None) -> None: + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} + self._timestamps: Dict[Tuple[str, int], float] = {} + self._ttl = ttl + + def __contains__(self, host: object) -> bool: + return host in self._addrs_rr + + def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + self._addrs_rr[key] = (cycle(addrs), len(addrs)) + + if self._ttl: + self._timestamps[key] = monotonic() + + def remove(self, key: Tuple[str, int]) -> None: + self._addrs_rr.pop(key, None) + + if self._ttl: + self._timestamps.pop(key, None) + + def clear(self) -> None: + self._addrs_rr.clear() + self._timestamps.clear() + + def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + loop, length = self._addrs_rr[key] + addrs = list(islice(loop, length)) + # Consume one more element to shift internal state of `cycle` + next(loop) + return addrs + + def expired(self, key: Tuple[str, int]) -> bool: + if self._ttl is None: + return False + + return self._timestamps[key] + self._ttl < monotonic() + + +class TCPConnector(BaseConnector): + """TCP connector. + + verify_ssl - Set to True to check ssl certifications. + fingerprint - Pass the binary sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + ttl_dns_cache - Max seconds having cached a DNS entry, None forever. + family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + keepalive_timeout - (optional) Keep-alive timeout. 
+ force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. + """ + + def __init__( + self, + *, + verify_ssl: bool = True, + fingerprint: Optional[bytes] = None, + use_dns_cache: bool = True, + ttl_dns_cache: Optional[int] = 10, + family: int = 0, + ssl_context: Optional[SSLContext] = None, + ssl: Union[None, bool, Fingerprint, SSLContext] = None, + local_addr: Optional[Tuple[str, int]] = None, + resolver: Optional[AbstractResolver] = None, + keepalive_timeout: Union[None, float, object] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + ): + super().__init__( + keepalive_timeout=keepalive_timeout, + force_close=force_close, + limit=limit, + limit_per_host=limit_per_host, + enable_cleanup_closed=enable_cleanup_closed, + loop=loop, + ) + + self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = use_dns_cache + self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) + self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._family = family + self._local_addr = local_addr + + def close(self) -> Awaitable[None]: + """Close all ongoing DNS calls.""" + for ev in self._throttle_dns_events.values(): + ev.cancel() + + return super().close() + + @property + def family(self) -> int: + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self) -> bool: + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + def clear_dns_cache( + self, host: Optional[str] = None, port: Optional[int] = None + ) -> None: + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.remove((host, port)) + elif host is not None or port is not None: + raise ValueError("either both host and port " "or none of them are allowed") + else: + self._cached_hosts.clear() + + async def _resolve_host( + self, host: str, port: int, traces: Optional[List["Trace"]] = None + ) -> List[Dict[str, Any]]: + if is_ip_address(host): + return [ + { + "hostname": host, + "host": host, + "port": port, + "family": self._family, + "proto": 0, + "flags": 0, + } + ] + + if not self._use_dns_cache: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + res = await self._resolver.resolve(host, port, family=self._family) + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + return res + + key = (host, port) + + if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + # get result early, before any await (#4014) + result = self._cached_hosts.next_addrs(key) + + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + return result + + if key in self._throttle_dns_events: + # get event early, before any await (#4014) + event = self._throttle_dns_events[key] + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + await event.wait() + else: + # update dict early, before any await (#4014) + self._throttle_dns_events[key] = 
EventResultOrError(self._loop) + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + self._cached_hosts.add(key, addrs) + self._throttle_dns_events[key].set() + except BaseException as e: + # any DNS exception, independently of the implementation + # is set for the waiters to raise the same exception. + self._throttle_dns_events[key].set(exc=e) + raise + finally: + self._throttle_dns_events.pop(key) + + return self._cached_hosts.next_addrs(key) + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + """Create connection. + + Has same keyword arguments as BaseEventLoop.create_connection. + """ + if req.proxy: + _, proto = await self._create_proxy_connection(req, traces, timeout) + else: + _, proto = await self._create_direct_connection(req, traces, timeout) + + return proto + + @staticmethod + @functools.lru_cache(None) + def _make_ssl_context(verified: bool) -> SSLContext: + if verified: + return ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + try: + sslcontext.options |= ssl.OP_NO_COMPRESSION + except AttributeError as attr_err: + warnings.warn( + "{!s}: The Python interpreter is compiled " + "against OpenSSL < 1.0.0. Ref: " + "https://docs.python.org/3/library/ssl.html" + "#ssl.OP_NO_COMPRESSION".format(attr_err), + ) + sslcontext.set_default_verify_paths() + return sslcontext + + def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]: + """Logic to get the correct SSL context + + 0. if req.ssl is false, return None + + 1. if ssl_context is specified in req, use it + 2. if _ssl_context is specified in self, use it + 3. otherwise: + 1. if verify_ssl is not specified in req, use self.ssl_context + (will generate a default context according to self.verify_ssl) + 2. if verify_ssl is True in req, generate a default SSL context + 3. 
if verify_ssl is False in req, generate a SSL context that + won't verify + """ + if req.is_ssl(): + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + return self._make_ssl_context(True) + else: + return None + + def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]: + ret = req.ssl + if isinstance(ret, Fingerprint): + return ret + ret = self._ssl + if isinstance(ret, Fingerprint): + return ret + return None + + async def _wrap_create_connection( + self, + *args: Any, + req: "ClientRequest", + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout(timeout.sock_connect): + return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + def _fail_on_no_start_tls(self, req: "ClientRequest") -> None: + """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``. + + One case is that :py:meth:`asyncio.loop.start_tls` is not yet + implemented under Python 3.6. It is necessary for TLS-in-TLS so + that it is possible to send HTTPS queries through HTTPS proxies. + + This doesn't affect regular HTTP requests, though. + """ + if not req.is_ssl(): + return + + proxy_url = req.proxy + assert proxy_url is not None + if proxy_url.scheme != "https": + return + + self._check_loop_for_start_tls() + + def _check_loop_for_start_tls(self) -> None: + try: + self._loop.start_tls + except AttributeError as attr_exc: + raise RuntimeError( + "An HTTPS request is being sent through an HTTPS proxy. " + "This needs support for TLS in TLS but it is not implemented " + "in your runtime for the stdlib asyncio.\n\n" + "Please upgrade to Python 3.7 or higher. For more details, " + "please see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n" + "* https://docs.aiohttp.org/en/stable/" + "client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + ) from attr_exc + + def _loop_supports_start_tls(self) -> bool: + try: + self._check_loop_for_start_tls() + except RuntimeError: + return False + else: + return True + + def _warn_about_tls_in_tls( + self, + underlying_transport: asyncio.Transport, + req: "ClientRequest", + ) -> None: + """Issue a warning if the requested URL has HTTPS scheme.""" + if req.request_info.url.scheme != "https": + return + + asyncio_supports_tls_in_tls = getattr( + underlying_transport, + "_start_tls_compatible", + False, + ) + + if asyncio_supports_tls_in_tls: + return + + warnings.warn( + "An HTTPS request is being sent through an HTTPS proxy. " + "This support for TLS in TLS is known to be disabled " + "in the stdlib asyncio. 
This is why you'll probably see " + "an error in the log below.\n\n" + "It is possible to enable it via monkeypatching under " + "Python 3.7 or higher. For more details, see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n\n" + "You can temporarily patch this as follows:\n" + "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + RuntimeWarning, + source=self, + # Why `4`? At least 3 of the calls in the stack originate + # from the methods in this class. + stacklevel=3, + ) + + async def _start_tls_connection( + self, + underlying_transport: asyncio.Transport, + req: "ClientRequest", + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + """Wrap the raw TCP transport with TLS.""" + tls_proto = self._factory() # Create a brand new proto for TLS + + # Safety of the `cast()` call here is based on the fact that + # internally `_get_ssl_context()` only returns `None` when + # `req.is_ssl()` evaluates to `False` which is never gonna happen + # in this code path. Of course, it's rather fragile + # maintainability-wise but this is to be solved separately. + sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + + try: + async with ceil_timeout(timeout.sock_connect): + try: + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + server_hostname=req.host, + ssl_handshake_timeout=timeout.total, + ) + except BaseException: + # We need to close the underlying transport since + # `start_tls()` probably failed before it had a + # chance to do this: + underlying_transport.close() + raise + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + except TypeError as type_err: + # Example cause looks like this: + # TypeError: transport is not supported by start_tls() + + raise ClientConnectionError( + "Cannot initialize a TLS-in-TLS connection to host " + f"{req.host!s}:{req.port:d} through an underlying connection " + f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} " + f"[{type_err!s}]" + ) from type_err + else: + tls_proto.connection_made( + tls_transport + ) # Kick the state machine of the new TLS protocol + + return tls_transport, tls_proto + + async def _create_direct_connection( + self, + req: "ClientRequest", + traces: List["Trace"], + timeout: "ClientTimeout", + *, + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + sslcontext = self._get_ssl_context(req) + fingerprint = self._get_fingerprint(req) + + host = req.url.raw_host + assert host is not None + port = req.port + assert port is not None + host_resolved = asyncio.ensure_future( + self._resolve_host(host, port, traces=traces), loop=self._loop + ) + try: + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
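Editor's note: the comment above is the reason `asyncio.shield()` appears on the next line: a caller that gives up must not cancel the single resolution shared by every waiter. A self-contained sketch of that pattern, with hypothetical names and a dummy address:

import asyncio


async def shared_lookup() -> str:
    await asyncio.sleep(0.1)      # stand-in for the real DNS resolution
    return "127.0.0.1"


async def one_waiter(task: "asyncio.Task[str]") -> str:
    # shield() lets this waiter be cancelled without cancelling `task` itself
    return await asyncio.shield(task)


async def main() -> None:
    task = asyncio.ensure_future(shared_lookup())
    waiter = asyncio.ensure_future(one_waiter(task))
    await asyncio.sleep(0)        # let the waiter start
    waiter.cancel()               # cancel one waiter...
    print(await task)             # ...the shared lookup still completes


asyncio.run(main())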
+ hosts = await asyncio.shield(host_resolved) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + host_resolved.add_done_callback(drop_exception) + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + # in case of proxy it is not ClientProxyConnectionError + # it is problem of resolving proxy ip itself + raise ClientConnectorError(req.connection_key, exc) from exc + + last_exc: Optional[Exception] = None + + for hinfo in hosts: + host = hinfo["host"] + port = hinfo["port"] + + try: + transp, proto = await self._wrap_create_connection( + self._factory, + host, + port, + timeout=timeout, + ssl=sslcontext, + family=hinfo["family"], + proto=hinfo["proto"], + flags=hinfo["flags"], + server_hostname=hinfo["hostname"] if sslcontext else None, + local_addr=self._local_addr, + req=req, + client_error=client_error, + ) + except ClientConnectorError as exc: + last_exc = exc + continue + + if req.is_ssl() and fingerprint: + try: + fingerprint.check(transp) + except ServerFingerprintMismatch as exc: + transp.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transp) + last_exc = exc + continue + + return transp, proto + else: + assert last_exc is not None + raise last_exc + + async def _create_proxy_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + self._fail_on_no_start_tls(req) + runtime_has_start_tls = self._loop_supports_start_tls() + + headers: Dict[str, str] = {} + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore[assignment] + headers[hdrs.HOST] = req.headers[hdrs.HOST] + + url = req.proxy + assert url is not None + proxy_req = ClientRequest( + hdrs.METH_GET, + url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl, + ) + + # create connection to proxy server + transport, proto = await self._create_direct_connection( + proxy_req, [], timeout, client_error=ClientProxyConnectionError + ) + + # Many HTTP proxies has buggy keepalive support. Let's not + # reuse connection but close it after processing every + # response. + proto.force_close() + + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + + if req.is_ssl(): + if runtime_has_start_tls: + self._warn_about_tls_in_tls(transport, req) + + # For HTTPS requests over HTTP proxy + # we must notify proxy to tunnel connection + # so we send CONNECT command: + # CONNECT www.python.org:443 HTTP/1.1 + # Host: www.python.org + # + # next we must do TLS handshake and so on + # to do this we must wrap raw socket into secure one + # asyncio handles this perfectly + proxy_req.method = hdrs.METH_CONNECT + proxy_req.url = req.url + key = attr.evolve( + req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None + ) + conn = Connection(self, key, proto, self._loop) + proxy_resp = await proxy_req.send(conn) + try: + protocol = conn._protocol + assert protocol is not None + + # read_until_eof=True will ensure the connection isn't closed + # once the response is received and processed allowing + # START_TLS to work on the connection below. 
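Editor's note: for orientation, a hedged sketch of how this proxy path is usually reached from the public client API; the proxy URL and credentials below are placeholders:

import aiohttp


async def fetch_via_proxy() -> str:
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "https://example.com",
            proxy="http://proxy.local:3128",                # placeholder proxy
            proxy_auth=aiohttp.BasicAuth("user", "secret"),
        ) as resp:
            # For an https:// target the request is tunnelled through the
            # CONNECT handshake set up by _create_proxy_connection() above.
            return await resp.text()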
+ protocol.set_response_params(read_until_eof=runtime_has_start_tls) + resp = await proxy_resp.start(conn) + except BaseException: + proxy_resp.close() + conn.close() + raise + else: + conn._protocol = None + conn._transport = None + try: + if resp.status != 200: + message = resp.reason + if message is None: + message = RESPONSES[resp.status][0] + raise ClientHttpProxyError( + proxy_resp.request_info, + resp.history, + status=resp.status, + message=message, + headers=resp.headers, + ) + if not runtime_has_start_tls: + rawsock = transport.get_extra_info("socket", default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance" + ) + # Duplicate the socket, so now we can close proxy transport + rawsock = rawsock.dup() + except BaseException: + # It shouldn't be closed in `finally` because it's fed to + # `loop.start_tls()` and the docs say not to touch it after + # passing there. + transport.close() + raise + finally: + if not runtime_has_start_tls: + transport.close() + + if not runtime_has_start_tls: + # HTTP proxy with support for upgrade to HTTPS + sslcontext = self._get_ssl_context(req) + return await self._wrap_create_connection( + self._factory, + timeout=timeout, + ssl=sslcontext, + sock=rawsock, + server_hostname=req.host, + req=req, + ) + + return await self._start_tls_connection( + # Access the old transport for the last time before it's + # closed and forgotten forever: + transport, + req=req, + timeout=timeout, + ) + finally: + proxy_resp.close() + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. + """ + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + self._path = path + + @property + def path(self) -> str: + """Path to unix socket.""" + return self._path + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout(timeout.sock_connect): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path + ) + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) + + +class NamedPipeConnector(BaseConnector): + """Named pipe connector. + + Only supported by the proactor event loop. + See also: https://docs.python.org/3.7/library/asyncio-eventloop.html + + path - Windows named pipe path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. 
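Editor's note: `UnixConnector` above is typically used to talk HTTP to a local daemon over a Unix socket. A minimal sketch; the Docker socket path is only an example and the host part of the URL is a dummy:

import aiohttp


async def docker_version() -> str:
    connector = aiohttp.UnixConnector(path="/var/run/docker.sock")
    async with aiohttp.ClientSession(connector=connector) as session:
        # The hostname is ignored for routing; the socket path decides
        # where the request actually goes.
        async with session.get("http://localhost/version") as resp:
            return await resp.text()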
+ """ + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + if not isinstance( + self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor " "loop under windows" + ) + self._path = path + + @property + def path(self) -> str: + """Path to the named pipe.""" + return self._path + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout(timeout.sock_connect): + _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501 + self._factory, self._path + ) + # the drain is required so that the connection_made is called + # and transport is set otherwise it is not set before the + # `assert conn.transport is not None` + # in client.py's _request method + await asyncio.sleep(0) + # other option is to manually set transport like + # `proto.transport = trans` + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientConnectorError(req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) diff --git a/venv/lib/python3.8/site-packages/aiohttp/cookiejar.py b/venv/lib/python3.8/site-packages/aiohttp/cookiejar.py new file mode 100644 index 0000000..1ac8854 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/cookiejar.py @@ -0,0 +1,411 @@ +import asyncio +import contextlib +import datetime +import os # noqa +import pathlib +import pickle +import re +from collections import defaultdict +from http.cookies import BaseCookie, Morsel, SimpleCookie +from typing import ( # noqa + DefaultDict, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Set, + Tuple, + Union, + cast, +) + +from yarl import URL + +from .abc import AbstractCookieJar, ClearCookiePredicate +from .helpers import is_ip_address, next_whole_second +from .typedefs import LooseCookies, PathLike, StrOrURL + +__all__ = ("CookieJar", "DummyCookieJar") + + +CookieItem = Union[str, "Morsel[str]"] + + +class CookieJar(AbstractCookieJar): + """Implements cookie storage adhering to RFC 6265.""" + + DATE_TOKENS_RE = re.compile( + r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" + r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)" + ) + + DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") + + DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") + + DATE_MONTH_RE = re.compile( + "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", + re.I, + ) + + DATE_YEAR_RE = re.compile(r"(\d{2,4})") + + MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc) + + MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2**31 - 1) + + def __init__( + self, + *, + unsafe: bool = False, + quote_cookie: bool = True, + treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__(loop=loop) + self._cookies: DefaultDict[str, SimpleCookie[str]] = defaultdict(SimpleCookie) + self._host_only_cookies: Set[Tuple[str, str]] = set() + self._unsafe = unsafe + self._quote_cookie = quote_cookie + if treat_as_secure_origin 
is None: + treat_as_secure_origin = [] + elif isinstance(treat_as_secure_origin, URL): + treat_as_secure_origin = [treat_as_secure_origin.origin()] + elif isinstance(treat_as_secure_origin, str): + treat_as_secure_origin = [URL(treat_as_secure_origin).origin()] + else: + treat_as_secure_origin = [ + URL(url).origin() if isinstance(url, str) else url.origin() + for url in treat_as_secure_origin + ] + self._treat_as_secure_origin = treat_as_secure_origin + self._next_expiration = next_whole_second() + self._expirations: Dict[Tuple[str, str], datetime.datetime] = {} + # #4515: datetime.max may not be representable on 32-bit platforms + self._max_time = self.MAX_TIME + try: + self._max_time.timestamp() + except OverflowError: + self._max_time = self.MAX_32BIT_TIME + + def save(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="wb") as f: + pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) + + def load(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="rb") as f: + self._cookies = pickle.load(f) + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + if predicate is None: + self._next_expiration = next_whole_second() + self._cookies.clear() + self._host_only_cookies.clear() + self._expirations.clear() + return + + to_del = [] + now = datetime.datetime.now(datetime.timezone.utc) + for domain, cookie in self._cookies.items(): + for name, morsel in cookie.items(): + key = (domain, name) + if ( + key in self._expirations and self._expirations[key] <= now + ) or predicate(morsel): + to_del.append(key) + + for domain, name in to_del: + key = (domain, name) + self._host_only_cookies.discard(key) + if key in self._expirations: + del self._expirations[(domain, name)] + self._cookies[domain].pop(name, None) + + next_expiration = min(self._expirations.values(), default=self._max_time) + try: + self._next_expiration = next_expiration.replace( + microsecond=0 + ) + datetime.timedelta(seconds=1) + except OverflowError: + self._next_expiration = self._max_time + + def clear_domain(self, domain: str) -> None: + self.clear(lambda x: self._is_domain_match(domain, x["domain"])) + + def __iter__(self) -> "Iterator[Morsel[str]]": + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self) -> int: + return sum(1 for i in self) + + def _do_expiration(self) -> None: + self.clear(lambda x: False) + + def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None: + self._next_expiration = min(self._next_expiration, when) + self._expirations[(domain, name)] = when + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + hostname = response_url.raw_host + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp: SimpleCookie[str] = SimpleCookie() + tmp[name] = cookie # type: ignore[assignment] + cookie = tmp[name] + + domain = cookie["domain"] + + # ignore domains with trailing dots + if domain.endswith("."): + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain.startswith("."): + # 
Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path = cookie["path"] + if not path or not path.startswith("/"): + # Set the cookie's path to the response path + path = response_url.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1 : path.rfind("/")] + cookie["path"] = path + + max_age = cookie["max-age"] + if max_age: + try: + delta_seconds = int(max_age) + try: + max_age_expiration = datetime.datetime.now( + datetime.timezone.utc + ) + datetime.timedelta(seconds=delta_seconds) + except OverflowError: + max_age_expiration = self._max_time + self._expire_cookie(max_age_expiration, domain, name) + except ValueError: + cookie["max-age"] = "" + + else: + expires = cookie["expires"] + if expires: + expire_time = self._parse_date(expires) + if expire_time: + self._expire_cookie(expire_time, domain, name) + else: + cookie["expires"] = "" + + self._cookies[domain][name] = cookie + + self._do_expiration() + + def filter_cookies( + self, request_url: URL = URL() + ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]: + """Returns this jar's cookies filtered by their attributes.""" + self._do_expiration() + request_url = URL(request_url) + filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = ( + SimpleCookie() if self._quote_cookie else BaseCookie() + ) + hostname = request_url.raw_host or "" + request_origin = URL() + with contextlib.suppress(ValueError): + request_origin = request_url.origin() + + is_not_secure = ( + request_url.scheme not in ("https", "wss") + and request_origin not in self._treat_as_secure_origin + ) + + for cookie in self: + name = cookie.key + domain = cookie["domain"] + + # Send shared cookies + if not domain: + filtered[name] = cookie.value + continue + + if not self._unsafe and is_ip_address(hostname): + continue + + if (domain, name) in self._host_only_cookies: + if domain != hostname: + continue + elif not self._is_domain_match(domain, hostname): + continue + + if not self._is_path_match(request_url.path, cookie["path"]): + continue + + if is_not_secure and cookie["secure"]: + continue + + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + filtered[name] = mrsl_val + + return filtered + + @staticmethod + def _is_domain_match(domain: str, hostname: str) -> bool: + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[: -len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @staticmethod + def _is_path_match(req_path: str, cookie_path: str) -> bool: + """Implements path matching adhering to RFC 6265.""" + if not req_path.startswith("/"): + req_path = "/" + + if req_path == cookie_path: + return True + + if not req_path.startswith(cookie_path): + return False + + if cookie_path.endswith("/"): + return True + + non_matching = req_path[len(cookie_path) :] + + return non_matching.startswith("/") + + @classmethod + def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]: + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return None + + found_time = False + 
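Editor's note: taken together, `update_cookies()` and `filter_cookies()` implement the RFC 6265 domain and path rules sketched above. A small usage sketch, assuming the public `aiohttp.CookieJar` API and placeholder cookie values:

import asyncio
from http.cookies import SimpleCookie

import aiohttp
from yarl import URL


async def main() -> None:
    jar = aiohttp.CookieJar()
    cookies = SimpleCookie("token=abc123; Domain=example.com; Path=/")
    jar.update_cookies(cookies, URL("http://example.com/login"))

    # Only cookies whose domain and path match the request URL come back.
    matched = jar.filter_cookies(URL("http://example.com/account"))
    print(matched["token"].value)        # -> abc123
    missed = jar.filter_cookies(URL("http://other.example.org/"))
    print("token" in missed)             # -> False


asyncio.run(main())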
found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + hour, minute, second = (int(s) for s in time_match.groups()) + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + assert month_match.lastindex is not None + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return None + + if not 1 <= day <= 31: + return None + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return None + + return datetime.datetime( + year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc + ) + + +class DummyCookieJar(AbstractCookieJar): + """Implements a dummy cookie storage. + + It can be used with the ClientSession when no cookie processing is needed. + + """ + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + super().__init__(loop=loop) + + def __iter__(self) -> "Iterator[Morsel[str]]": + while False: + yield None + + def __len__(self) -> int: + return 0 + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + pass + + def clear_domain(self, domain: str) -> None: + pass + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + pass + + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + return SimpleCookie() diff --git a/venv/lib/python3.8/site-packages/aiohttp/formdata.py b/venv/lib/python3.8/site-packages/aiohttp/formdata.py new file mode 100644 index 0000000..e7cd24c --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/formdata.py @@ -0,0 +1,172 @@ +import io +from typing import Any, Iterable, List, Optional +from urllib.parse import urlencode + +from multidict import MultiDict, MultiDictProxy + +from . import hdrs, multipart, payload +from .helpers import guess_filename +from .payload import Payload + +__all__ = ("FormData",) + + +class FormData: + """Helper class for form body generation. + + Supports multipart/form-data and application/x-www-form-urlencoded. 
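Editor's note: a brief sketch of the two encodings `FormData` chooses between; the URLs and the file name are placeholders:

import aiohttp


async def submit() -> None:
    async with aiohttp.ClientSession() as session:
        # Only simple string fields -> application/x-www-form-urlencoded
        form = aiohttp.FormData()
        form.add_field("username", "alice")
        form.add_field("comment", "hello")
        await session.post("https://example.com/submit", data=form)

        # A file-like value flips the writer into multipart/form-data
        upload = aiohttp.FormData()
        upload.add_field(
            "report",
            open("report.csv", "rb"),        # placeholder file
            filename="report.csv",
            content_type="text/csv",
        )
        await session.post("https://example.com/upload", data=upload)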
+ """ + + def __init__( + self, + fields: Iterable[Any] = (), + quote_fields: bool = True, + charset: Optional[str] = None, + ) -> None: + self._writer = multipart.MultipartWriter("form-data") + self._fields: List[Any] = [] + self._is_multipart = False + self._is_processed = False + self._quote_fields = quote_fields + self._charset = charset + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self) -> bool: + return self._is_multipart + + def add_field( + self, + name: str, + value: Any, + *, + content_type: Optional[str] = None, + filename: Optional[str] = None, + content_transfer_encoding: Optional[str] = None, + ) -> None: + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + if filename is None and content_transfer_encoding is None: + filename = name + + type_options: MultiDict[str] = MultiDict({"name": name}) + if filename is not None and not isinstance(filename, str): + raise TypeError( + "filename must be an instance of str. " "Got: %s" % filename + ) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options["filename"] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError( + "content_type must be an instance of str. " "Got: %s" % content_type + ) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError( + "content_transfer_encoding must be an instance" + " of str. 
Got: %s" % content_transfer_encoding + ) + headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + self._is_multipart = True + + self._fields.append((type_options, headers, value)) + + def add_fields(self, *fields: Any) -> None: + to_add = list(fields) + + while to_add: + rec = to_add.pop(0) + + if isinstance(rec, io.IOBase): + k = guess_filename(rec, "unknown") + self.add_field(k, rec) # type: ignore[arg-type] + + elif isinstance(rec, (MultiDictProxy, MultiDict)): + to_add.extend(rec.items()) + + elif isinstance(rec, (list, tuple)) and len(rec) == 2: + k, fp = rec + self.add_field(k, fp) # type: ignore[arg-type] + + else: + raise TypeError( + "Only io.IOBase, multidict and (name, file) " + "pairs allowed, use .add_field() for passing " + "more complex parameters, got {!r}".format(rec) + ) + + def _gen_form_urlencoded(self) -> payload.BytesPayload: + # form data (x-www-form-urlencoded) + data = [] + for type_options, _, value in self._fields: + data.append((type_options["name"], value)) + + charset = self._charset if self._charset is not None else "utf-8" + + if charset == "utf-8": + content_type = "application/x-www-form-urlencoded" + else: + content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset + + return payload.BytesPayload( + urlencode(data, doseq=True, encoding=charset).encode(), + content_type=content_type, + ) + + def _gen_form_data(self) -> multipart.MultipartWriter: + """Encode a list of fields using the multipart/form-data MIME format""" + if self._is_processed: + raise RuntimeError("Form data has been processed already") + for dispparams, headers, value in self._fields: + try: + if hdrs.CONTENT_TYPE in headers: + part = payload.get_payload( + value, + content_type=headers[hdrs.CONTENT_TYPE], + headers=headers, + encoding=self._charset, + ) + else: + part = payload.get_payload( + value, headers=headers, encoding=self._charset + ) + except Exception as exc: + raise TypeError( + "Can not serialize value type: %r\n " + "headers: %r\n value: %r" % (type(value), headers, value) + ) from exc + + if dispparams: + part.set_content_disposition( + "form-data", quote_fields=self._quote_fields, **dispparams + ) + # FIXME cgi.FieldStorage doesn't likes body parts with + # Content-Length which were sent via chunked transfer encoding + assert part.headers is not None + part.headers.popall(hdrs.CONTENT_LENGTH, None) + + self._writer.append_payload(part) + + self._is_processed = True + return self._writer + + def __call__(self) -> Payload: + if self._is_multipart: + return self._gen_form_data() + else: + return self._gen_form_urlencoded() diff --git a/venv/lib/python3.8/site-packages/aiohttp/hdrs.py b/venv/lib/python3.8/site-packages/aiohttp/hdrs.py new file mode 120000 index 0000000..ec125ab --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/hdrs.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a3/03/51/c34760a1d7835b2a1b0552e463cf1d2db90da0cdb473313dc66e34a031 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/helpers.py b/venv/lib/python3.8/site-packages/aiohttp/helpers.py new file mode 100644 index 0000000..0469ee4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/helpers.py @@ -0,0 +1,877 @@ +"""Various helper functions""" + +import asyncio +import base64 +import binascii +import datetime +import functools +import inspect +import netrc +import os +import platform +import re +import sys +import time +import warnings +import weakref +from collections import namedtuple +from contextlib import suppress +from 
email.parser import HeaderParser +from email.utils import parsedate +from math import ceil +from pathlib import Path +from types import TracebackType +from typing import ( + Any, + Callable, + ContextManager, + Dict, + Generator, + Generic, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from urllib.parse import quote +from urllib.request import getproxies, proxy_bypass + +import async_timeout +import attr +from multidict import MultiDict, MultiDictProxy +from yarl import URL + +from . import hdrs +from .log import client_logger, internal_logger +from .typedefs import PathLike, Protocol # noqa + +__all__ = ("BasicAuth", "ChainMapProxy", "ETag") + +IS_MACOS = platform.system() == "Darwin" +IS_WINDOWS = platform.system() == "Windows" + +PY_36 = sys.version_info >= (3, 6) +PY_37 = sys.version_info >= (3, 7) +PY_38 = sys.version_info >= (3, 8) +PY_310 = sys.version_info >= (3, 10) + +if sys.version_info < (3, 7): + import idna_ssl + + idna_ssl.patch_match_hostname() + + def all_tasks( + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> Set["asyncio.Task[Any]"]: + tasks = list(asyncio.Task.all_tasks(loop)) + return {t for t in tasks if not t.done()} + +else: + all_tasks = asyncio.all_tasks + + +_T = TypeVar("_T") +_S = TypeVar("_S") + + +sentinel: Any = object() +NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) + +# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr +# for compatibility with older versions +DEBUG: bool = getattr(sys.flags, "dev_mode", False) or ( + not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) +) + + +CHAR = {chr(i) for i in range(0, 128)} +CTL = {chr(i) for i in range(0, 32)} | { + chr(127), +} +SEPARATORS = { + "(", + ")", + "<", + ">", + "@", + ",", + ";", + ":", + "\\", + '"', + "/", + "[", + "]", + "?", + "=", + "{", + "}", + " ", + chr(9), +} +TOKEN = CHAR ^ CTL ^ SEPARATORS + + +class noop: + def __await__(self) -> Generator[None, None, None]: + yield + + +class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): + """Http basic authentication helper.""" + + def __new__( + cls, login: str, password: str = "", encoding: str = "latin1" + ) -> "BasicAuth": + if login is None: + raise ValueError("None is not allowed as login value") + + if password is None: + raise ValueError("None is not allowed as password value") + + if ":" in login: + raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": + """Create a BasicAuth object from an Authorization HTTP header.""" + try: + auth_type, encoded_credentials = auth_header.split(" ", 1) + except ValueError: + raise ValueError("Could not parse authorization header.") + + if auth_type.lower() != "basic": + raise ValueError("Unknown authorization method %s" % auth_type) + + try: + decoded = base64.b64decode( + encoded_credentials.encode("ascii"), validate=True + ).decode(encoding) + except binascii.Error: + raise ValueError("Invalid base64 encoding.") + + try: + # RFC 2617 HTTP Authentication + # https://www.ietf.org/rfc/rfc2617.txt + # the colon must be present, but the username and password may be + # otherwise blank. 
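Editor's note: a quick round trip through `BasicAuth` illustrates the point made just above: the colon is mandatory in the decoded credentials, but the login part may be empty. The values here are throwaway examples:

from aiohttp import BasicAuth

auth = BasicAuth("alice", "secret")
assert auth.encode() == "Basic YWxpY2U6c2VjcmV0"
assert BasicAuth.decode("Basic YWxpY2U6c2VjcmV0") == auth

# An empty login is accepted as long as the colon is present.
assert BasicAuth.decode("Basic OnNlY3JldA==") == BasicAuth("", "secret")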
+ username, password = decoded.split(":", 1) + except ValueError: + raise ValueError("Invalid credentials.") + + return cls(username, password, encoding=encoding) + + @classmethod + def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: + """Create BasicAuth from url.""" + if not isinstance(url, URL): + raise TypeError("url should be yarl.URL instance") + if url.user is None: + return None + return cls(url.user, url.password or "", encoding=encoding) + + def encode(self) -> str: + """Encode credentials.""" + creds = (f"{self.login}:{self.password}").encode(self.encoding) + return "Basic %s" % base64.b64encode(creds).decode(self.encoding) + + +def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + auth = BasicAuth.from_url(url) + if auth is None: + return url, None + else: + return url.with_user(None), auth + + +def netrc_from_env() -> Optional[netrc.netrc]: + """Load netrc from file. + + Attempt to load it from the path specified by the env-var + NETRC or in the default location in the user's home directory. + + Returns None if it couldn't be found or fails to parse. + """ + netrc_env = os.environ.get("NETRC") + + if netrc_env is not None: + netrc_path = Path(netrc_env) + else: + try: + home_dir = Path.home() + except RuntimeError as e: # pragma: no cover + # if pathlib can't resolve home, it may raise a RuntimeError + client_logger.debug( + "Could not resolve home directory when " + "trying to look for .netrc file: %s", + e, + ) + return None + + netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") + + try: + return netrc.netrc(str(netrc_path)) + except netrc.NetrcParseError as e: + client_logger.warning("Could not parse .netrc file: %s", e) + except OSError as e: + # we couldn't read the file (doesn't exist, permissions, etc.) 
+ if netrc_env or netrc_path.is_file(): + # only warn if the environment wanted us to load it, + # or it appears like the default file does actually exist + client_logger.warning("Could not read .netrc file: %s", e) + + return None + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ProxyInfo: + proxy: URL + proxy_auth: Optional[BasicAuth] + + +def proxies_from_env() -> Dict[str, ProxyInfo]: + proxy_urls = { + k: URL(v) + for k, v in getproxies().items() + if k in ("http", "https", "ws", "wss") + } + netrc_obj = netrc_from_env() + stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} + ret = {} + for proto, val in stripped.items(): + proxy, auth = val + if proxy.scheme in ("https", "wss"): + client_logger.warning( + "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy + ) + continue + if netrc_obj and auth is None: + auth_from_netrc = None + if proxy.host is not None: + auth_from_netrc = netrc_obj.authenticators(proxy.host) + if auth_from_netrc is not None: + # auth_from_netrc is a (`user`, `account`, `password`) tuple, + # `user` and `account` both can be username, + # if `user` is None, use `account` + *logins, password = auth_from_netrc + login = logins[0] if logins[0] else logins[-1] + auth = BasicAuth(cast(str, login), cast(str, password)) + ret[proto] = ProxyInfo(proxy, auth) + return ret + + +def current_task( + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> "Optional[asyncio.Task[Any]]": + if sys.version_info >= (3, 7): + return asyncio.current_task(loop=loop) + else: + return asyncio.Task.current_task(loop=loop) + + +def get_running_loop( + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> asyncio.AbstractEventLoop: + if loop is None: + loop = asyncio.get_event_loop() + if not loop.is_running(): + warnings.warn( + "The object should be created within an async function", + DeprecationWarning, + stacklevel=3, + ) + if loop.get_debug(): + internal_logger.warning( + "The object should be created within an async function", stack_info=True + ) + return loop + + +def isasyncgenfunction(obj: Any) -> bool: + func = getattr(inspect, "isasyncgenfunction", None) + if func is not None: + return func(obj) # type: ignore[no-any-return] + else: + return False + + +def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Get a permitted proxy for the given URL from the env.""" + if url.host is not None and proxy_bypass(url.host): + raise LookupError(f"Proxying is disallowed for `{url.host!r}`") + + proxies_in_env = proxies_from_env() + try: + proxy_info = proxies_in_env[url.scheme] + except KeyError: + raise LookupError(f"No proxies found for `{url!s}` in the env") + else: + return proxy_info.proxy, proxy_info.proxy_auth + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class MimeType: + type: str + subtype: str + suffix: str + parameters: "MultiDictProxy[str]" + + +@functools.lru_cache(maxsize=56) +def parse_mimetype(mimetype: str) -> MimeType: + """Parses a MIME type into its components. + + mimetype is a MIME type string. + + Returns a MimeType object. 
+ + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + MimeType(type='text', subtype='html', suffix='', + parameters={'charset': 'utf-8'}) + + """ + if not mimetype: + return MimeType( + type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) + ) + + parts = mimetype.split(";") + params: MultiDict[str] = MultiDict() + for item in parts[1:]: + if not item: + continue + key, value = cast( + Tuple[str, str], item.split("=", 1) if "=" in item else (item, "") + ) + params.add(key.lower().strip(), value.strip(' "')) + + fulltype = parts[0].strip().lower() + if fulltype == "*": + fulltype = "*/*" + + mtype, stype = ( + cast(Tuple[str, str], fulltype.split("/", 1)) + if "/" in fulltype + else (fulltype, "") + ) + stype, suffix = ( + cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "") + ) + + return MimeType( + type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) + ) + + +def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: + name = getattr(obj, "name", None) + if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": + return Path(name).name + return default + + +not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") +QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} + + +def quoted_string(content: str) -> str: + """Return 7-bit content as quoted-string. + + Format content into a quoted-string as defined in RFC5322 for + Internet Message Format. Notice that this is not the 8-bit HTTP + format, but the 7-bit email format. Content must be in usascii or + a ValueError is raised. + """ + if not (QCONTENT > set(content)): + raise ValueError(f"bad content for quoted-string {content!r}") + return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) + + +def content_disposition_header( + disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str +) -> str: + """Sets ``Content-Disposition`` header for MIME. + + This is the MIME payload Content-Disposition header from RFC 2183 + and RFC 7579 section 4.2, not the HTTP Content-Disposition from + RFC 6266. + + disptype is a disposition type: inline, attachment, form-data. + Should be valid extension token (see RFC 2183) + + quote_fields performs value quoting to 7-bit MIME headers + according to RFC 7578. Set to quote_fields to False if recipient + can take 8-bit file names and field values. + + _charset specifies the charset to use when quote_fields is True. + + params is a dict with disposition params. 
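Editor's note: two concrete calls matching the behaviour documented above (this is internal API, imported from `aiohttp.helpers` only for illustration):

from aiohttp.helpers import content_disposition_header

print(content_disposition_header("attachment", filename="report.csv"))
# -> attachment; filename="report.csv"

print(content_disposition_header("form-data", name="comment"))
# -> form-data; name="comment"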
+ """ + if not disptype or not (TOKEN > set(disptype)): + raise ValueError("bad content disposition type {!r}" "".format(disptype)) + + value = disptype + if params: + lparams = [] + for key, val in params.items(): + if not key or not (TOKEN > set(key)): + raise ValueError( + "bad content disposition parameter" " {!r}={!r}".format(key, val) + ) + if quote_fields: + if key.lower() == "filename": + qval = quote(val, "", encoding=_charset) + lparams.append((key, '"%s"' % qval)) + else: + try: + qval = quoted_string(val) + except ValueError: + qval = "".join( + (_charset, "''", quote(val, "", encoding=_charset)) + ) + lparams.append((key + "*", qval)) + else: + lparams.append((key, '"%s"' % qval)) + else: + qval = val.replace("\\", "\\\\").replace('"', '\\"') + lparams.append((key, '"%s"' % qval)) + sparams = "; ".join("=".join(pair) for pair in lparams) + value = "; ".join((value, sparams)) + return value + + +class _TSelf(Protocol, Generic[_T]): + _cache: Dict[str, _T] + + +class reify(Generic[_T]): + """Use as a class method decorator. + + It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. + """ + + def __init__(self, wrapped: Callable[..., _T]) -> None: + self.wrapped = wrapped + self.__doc__ = wrapped.__doc__ + self.name = wrapped.__name__ + + def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst: _TSelf[_T], value: _T) -> None: + raise AttributeError("reified property is read-only") + + +reify_py = reify + +try: + from ._helpers import reify as reify_c + + if not NO_EXTENSIONS: + reify = reify_c # type: ignore[misc,assignment] +except ImportError: + pass + +_ipv4_pattern = ( + r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" +) +_ipv6_pattern = ( + r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" + r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" + r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" + r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" + r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" + r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" + r":|:(:[A-F0-9]{1,4}){7})$" +) +_ipv4_regex = re.compile(_ipv4_pattern) +_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) +_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) +_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) + + +def _is_ip_address( + regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] +) -> bool: + if host is None: + return False + if isinstance(host, str): + return bool(regex.match(host)) + elif isinstance(host, (bytes, bytearray, memoryview)): + return bool(regexb.match(host)) + else: + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + + +is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) +is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + + +def is_ip_address(host: Optional[Union[str, bytes, 
bytearray, memoryview]]) -> bool: + return is_ipv4_address(host) or is_ipv6_address(host) + + +def next_whole_second() -> datetime.datetime: + """Return current time rounded up to the next whole second.""" + return datetime.datetime.now(datetime.timezone.utc).replace( + microsecond=0 + ) + datetime.timedelta(seconds=0) + + +_cached_current_datetime: Optional[int] = None +_cached_formatted_datetime = "" + + +def rfc822_formatted_time() -> str: + global _cached_current_datetime + global _cached_formatted_datetime + + now = int(time.time()) + if now != _cached_current_datetime: + # Weekday and month names for HTTP date/time formatting; + # always English! + # Tuples are constants stored in codeobject! + _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") + _monthname = ( + "", # Dummy so we can use 1-based month numbers + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ) + + year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) + _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + _weekdayname[wd], + day, + _monthname[month], + year, + hh, + mm, + ss, + ) + _cached_current_datetime = now + return _cached_formatted_datetime + + +def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: + ref, name = info + ob = ref() + if ob is not None: + with suppress(Exception): + getattr(ob, name)() + + +def weakref_handle( + ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout >= 5: + when = ceil(when) + + return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + return None + + +def call_later( + cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout > 5: + when = ceil(when) + return loop.call_at(when, cb) + return None + + +class TimeoutHandle: + """Timeout handle""" + + def __init__( + self, loop: asyncio.AbstractEventLoop, timeout: Optional[float] + ) -> None: + self._timeout = timeout + self._loop = loop + self._callbacks: List[ + Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] + ] = [] + + def register( + self, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> None: + self._callbacks.append((callback, args, kwargs)) + + def close(self) -> None: + self._callbacks.clear() + + def start(self) -> Optional[asyncio.Handle]: + timeout = self._timeout + if timeout is not None and timeout > 0: + when = self._loop.time() + timeout + if timeout >= 5: + when = ceil(when) + return self._loop.call_at(when, self.__call__) + else: + return None + + def timer(self) -> "BaseTimerContext": + if self._timeout is not None and self._timeout > 0: + timer = TimerContext(self._loop) + self.register(timer.timeout) + return timer + else: + return TimerNoop() + + def __call__(self) -> None: + for cb, args, kwargs in self._callbacks: + with suppress(Exception): + cb(*args, **kwargs) + + self._callbacks.clear() + + +class BaseTimerContext(ContextManager["BaseTimerContext"]): + pass + + +class TimerNoop(BaseTimerContext): + def __enter__(self) -> BaseTimerContext: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + return + + +class TimerContext(BaseTimerContext): + """Low resolution timeout 
context manager""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._tasks: List[asyncio.Task[Any]] = [] + self._cancelled = False + + def __enter__(self) -> BaseTimerContext: + task = current_task(loop=self._loop) + + if task is None: + raise RuntimeError( + "Timeout context manager should be used " "inside a task" + ) + + if self._cancelled: + raise asyncio.TimeoutError from None + + self._tasks.append(task) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + if self._tasks: + self._tasks.pop() + + if exc_type is asyncio.CancelledError and self._cancelled: + raise asyncio.TimeoutError from None + return None + + def timeout(self) -> None: + if not self._cancelled: + for task in set(self._tasks): + task.cancel() + + self._cancelled = True + + +def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout: + if delay is None or delay <= 0: + return async_timeout.timeout(None) + + loop = get_running_loop() + now = loop.time() + when = now + delay + if delay > 5: + when = ceil(when) + return async_timeout.timeout_at(when) + + +class HeadersMixin: + + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) + + _content_type: Optional[str] = None + _content_dict: Optional[Dict[str, str]] = None + _stored_content_type = sentinel + + def _parse_content_type(self, raw: str) -> None: + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = "application/octet-stream" + self._content_dict = {} + else: + msg = HeaderParser().parsestr("Content-Type: " + raw) + self._content_type = msg.get_content_type() + params = msg.get_params() + self._content_dict = dict(params[1:]) # First element is content type again + + @property + def content_type(self) -> str: + """The value of content part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined] + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_type # type: ignore[return-value] + + @property + def charset(self) -> Optional[str]: + """The value of charset part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined] + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_dict.get("charset") # type: ignore[union-attr] + + @property + def content_length(self) -> Optional[int]: + """The value of Content-Length HTTP header.""" + content_length = self._headers.get( # type: ignore[attr-defined] + hdrs.CONTENT_LENGTH + ) + + if content_length is not None: + return int(content_length) + else: + return None + + +def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: + if not fut.done(): + fut.set_result(result) + + +def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: + if not fut.done(): + fut.set_exception(exc) + + +class ChainMapProxy(Mapping[str, Any]): + __slots__ = ("_maps",) + + def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None: + self._maps = tuple(maps) + + def __init_subclass__(cls) -> None: + raise TypeError( + "Inheritance class {} from ChainMapProxy " + "is forbidden".format(cls.__name__) + ) + + def __getitem__(self, key: str) -> Any: + for mapping in self._maps: + try: + return mapping[key] + except KeyError: + pass + raise KeyError(key) + + def get(self, key: str, default: Any = 
None) -> Any: + return self[key] if key in self else default + + def __len__(self) -> int: + # reuses stored hash values if possible + return len(set().union(*self._maps)) # type: ignore[arg-type] + + def __iter__(self) -> Iterator[str]: + d: Dict[str, Any] = {} + for mapping in reversed(self._maps): + # reuses stored hash values if possible + d.update(mapping) + return iter(d) + + def __contains__(self, key: object) -> bool: + return any(key in m for m in self._maps) + + def __bool__(self) -> bool: + return any(self._maps) + + def __repr__(self) -> str: + content = ", ".join(map(repr, self._maps)) + return f"ChainMapProxy({content})" + + +# https://tools.ietf.org/html/rfc7232#section-2.3 +_ETAGC = r"[!#-}\x80-\xff]+" +_ETAGC_RE = re.compile(_ETAGC) +_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' +QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) +LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") + +ETAG_ANY = "*" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ETag: + value: str + is_weak: bool = False + + +def validate_etag_value(value: str) -> None: + if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): + raise ValueError( + f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" + ) + + +def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: + """Process a date string, return a datetime object""" + if date_str is not None: + timetuple = parsedate(date_str) + if timetuple is not None: + with suppress(ValueError): + return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) + return None diff --git a/venv/lib/python3.8/site-packages/aiohttp/http.py b/venv/lib/python3.8/site-packages/aiohttp/http.py new file mode 100644 index 0000000..ca9dc54 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/http.py @@ -0,0 +1,70 @@ +import http.server +import sys +from typing import Mapping, Tuple + +from . 
import __version__ +from .http_exceptions import HttpProcessingError as HttpProcessingError +from .http_parser import ( + HeadersParser as HeadersParser, + HttpParser as HttpParser, + HttpRequestParser as HttpRequestParser, + HttpResponseParser as HttpResponseParser, + RawRequestMessage as RawRequestMessage, + RawResponseMessage as RawResponseMessage, +) +from .http_websocket import ( + WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, + WS_KEY as WS_KEY, + WebSocketError as WebSocketError, + WebSocketReader as WebSocketReader, + WebSocketWriter as WebSocketWriter, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen as ws_ext_gen, + ws_ext_parse as ws_ext_parse, +) +from .http_writer import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + StreamWriter as StreamWriter, +) + +__all__ = ( + "HttpProcessingError", + "RESPONSES", + "SERVER_SOFTWARE", + # .http_writer + "StreamWriter", + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + # .http_parser + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", + # .http_websocket + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "ws_ext_gen", + "ws_ext_parse", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( + sys.version_info, __version__ +) + +RESPONSES: Mapping[int, Tuple[str, str]] = http.server.BaseHTTPRequestHandler.responses diff --git a/venv/lib/python3.8/site-packages/aiohttp/http_exceptions.py b/venv/lib/python3.8/site-packages/aiohttp/http_exceptions.py new file mode 120000 index 0000000..93edca6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/http_exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/78/00/90/b7b6b3c0d6d48bc6aa5c1e49dad21c73f081e3fe32e262ccf5265de2d3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/http_parser.py b/venv/lib/python3.8/site-packages/aiohttp/http_parser.py new file mode 100644 index 0000000..5a66ce4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/http_parser.py @@ -0,0 +1,969 @@ +import abc +import asyncio +import collections +import re +import string +import zlib +from contextlib import suppress +from enum import IntEnum +from typing import ( + Any, + Generic, + List, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from multidict import CIMultiDict, CIMultiDictProxy, istr +from yarl import URL + +from . 
import hdrs +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS, BaseTimerContext +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentEncodingError, + ContentLengthError, + InvalidHeader, + LineTooLong, + TransferEncodingError, +) +from .http_writer import HttpVersion, HttpVersion10 +from .log import internal_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import Final, RawHeaders + +try: + import brotli + + HAS_BROTLI = True +except ImportError: # pragma: no cover + HAS_BROTLI = False + + +__all__ = ( + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", +) + +ASCIISET: Final[Set[str]] = set(string.printable) + +# See https://tools.ietf.org/html/rfc7230#section-3.1.1 +# and https://tools.ietf.org/html/rfc7230#appendix-B +# +# method = token +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / +# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +# token = 1*tchar +METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)") +HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]") + + +class RawRequestMessage(NamedTuple): + method: str + path: str + version: HttpVersion + headers: "CIMultiDictProxy[str]" + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + url: URL + + +RawResponseMessage = collections.namedtuple( + "RawResponseMessage", + [ + "version", + "code", + "reason", + "headers", + "raw_headers", + "should_close", + "compression", + "upgrade", + "chunked", + ], +) + + +_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) + + +class ParseState(IntEnum): + + PARSE_NONE = 0 + PARSE_LENGTH = 1 + PARSE_CHUNKED = 2 + PARSE_UNTIL_EOF = 3 + + +class ChunkState(IntEnum): + PARSE_CHUNKED_SIZE = 0 + PARSE_CHUNKED_CHUNK = 1 + PARSE_CHUNKED_CHUNK_EOF = 2 + PARSE_MAYBE_TRAILERS = 3 + PARSE_TRAILERS = 4 + + +class HeadersParser: + def __init__( + self, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + ) -> None: + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: + headers: CIMultiDict[str] = CIMultiDict() + raw_headers = [] + + lines_idx = 1 + line = lines[1] + line_count = len(lines) + + while line: + # Parse initial header name : value pair. 
+ try: + bname, bvalue = line.split(b":", 1) + except ValueError: + raise InvalidHeader(line) from None + + bname = bname.strip(b" \t") + bvalue = bvalue.lstrip() + if HDRRE.search(bname): + raise InvalidHeader(bname) + if len(bname) > self.max_field_size: + raise LineTooLong( + "request header name {}".format( + bname.decode("utf8", "xmlcharrefreplace") + ), + str(self.max_field_size), + str(len(bname)), + ) + + header_length = len(bvalue) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = line and line[0] in (32, 9) # (' ', '\t') + + if continuation: + bvalue_lst = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "xmlcharrefreplace") + ), + str(self.max_field_size), + str(header_length), + ) + bvalue_lst.append(line) + + # next line + lines_idx += 1 + if lines_idx < line_count: + line = lines[lines_idx] + if line: + continuation = line[0] in (32, 9) # (' ', '\t') + else: + line = b"" + break + bvalue = b"".join(bvalue_lst) + else: + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "xmlcharrefreplace") + ), + str(self.max_field_size), + str(header_length), + ) + + bvalue = bvalue.strip() + name = bname.decode("utf-8", "surrogateescape") + value = bvalue.decode("utf-8", "surrogateescape") + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return (CIMultiDictProxy(headers), tuple(raw_headers)) + + +class HttpParser(abc.ABC, Generic[_MsgT]): + def __init__( + self, + protocol: Optional[BaseProtocol] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + limit: int = 2**16, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + timer: Optional[BaseTimerContext] = None, + code: Optional[int] = None, + method: Optional[str] = None, + readall: bool = False, + payload_exception: Optional[Type[BaseException]] = None, + response_with_body: bool = True, + read_until_eof: bool = False, + auto_decompress: bool = True, + ) -> None: + self.protocol = protocol + self.loop = loop + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self.timer = timer + self.code = code + self.method = method + self.readall = readall + self.payload_exception = payload_exception + self.response_with_body = response_with_body + self.read_until_eof = read_until_eof + + self._lines: List[bytes] = [] + self._tail = b"" + self._upgraded = False + self._payload = None + self._payload_parser: Optional[HttpPayloadParser] = None + self._auto_decompress = auto_decompress + self._limit = limit + self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + + @abc.abstractmethod + def parse_message(self, lines: List[bytes]) -> _MsgT: + pass + + def feed_eof(self) -> Optional[_MsgT]: + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + else: + # try to extract partial message + if self._tail: + self._lines.append(self._tail) + + if self._lines: + if self._lines[-1] != "\r\n": + self._lines.append(b"") + with suppress(Exception): + return self.parse_message(self._lines) + return None + + def feed_data( + self, + data: bytes, + SEP: bytes = b"\r\n", + EMPTY: bytes = b"", + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + METH_CONNECT: str = hdrs.METH_CONNECT, + SEC_WEBSOCKET_KEY1: istr = 
hdrs.SEC_WEBSOCKET_KEY1, + ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: + + messages = [] + + if self._tail: + data, self._tail = self._tail + data, b"" + + data_len = len(data) + start_pos = 0 + loop = self.loop + + while start_pos < data_len: + + # read HTTP message (request/response line + headers), \r\n\r\n + # and split by lines + if self._payload_parser is None and not self._upgraded: + pos = data.find(SEP, start_pos) + # consume \r\n + if pos == start_pos and not self._lines: + start_pos = pos + 2 + continue + + if pos >= start_pos: + # line found + self._lines.append(data[start_pos:pos]) + start_pos = pos + 2 + + # \r\n\r\n found + if self._lines[-1] == EMPTY: + try: + msg: _MsgT = self.parse_message(self._lines) + finally: + self._lines.clear() + + def get_content_length() -> Optional[int]: + # payload length + length_hdr = msg.headers.get(CONTENT_LENGTH) + if length_hdr is None: + return None + + try: + length = int(length_hdr) + except ValueError: + raise InvalidHeader(CONTENT_LENGTH) + + if length < 0: + raise InvalidHeader(CONTENT_LENGTH) + + return length + + length = get_content_length() + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in msg.headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + self._upgraded = msg.upgrade + + method = getattr(msg, "method", self.method) + + assert self.protocol is not None + # calculate payload + if ( + (length is not None and length > 0) + or msg.chunked + and not msg.upgrade + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + readall=self.readall, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + elif method == METH_CONNECT: + assert isinstance(msg, RawRequestMessage) + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + self._upgraded = True + self._payload_parser = HttpPayloadParser( + payload, + method=msg.method, + compression=msg.compression, + readall=True, + auto_decompress=self._auto_decompress, + ) + else: + if ( + getattr(msg, "code", 100) >= 199 + and length is None + and self.read_until_eof + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + readall=True, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + else: + payload = EMPTY_PAYLOAD + + messages.append((msg, payload)) + else: + self._tail = data[start_pos:] + data = EMPTY + break + + # no parser, just store + elif self._payload_parser is None and self._upgraded: + assert not self._lines + break + + # feed payload + elif data and start_pos < data_len: + assert not self._lines + assert self._payload_parser is not None + try: + eof, data = self._payload_parser.feed_data(data[start_pos:]) + except BaseException as exc: + if self.payload_exception is not None: + self._payload_parser.payload.set_exception( + self.payload_exception(str(exc)) + ) + else: + self._payload_parser.payload.set_exception(exc) + + eof = True + data = b"" + + if eof: + start_pos = 0 + data_len 
= len(data) + self._payload_parser = None + continue + else: + break + + if data and start_pos < data_len: + data = data[start_pos:] + else: + data = EMPTY + + return messages, self._upgraded, data + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple[ + "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool + ]: + """Parses RFC 5322 headers from a stream. + + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. + """ + headers, raw_headers = self._headers_parser.parse_headers(lines) + close_conn = None + encoding = None + upgrade = False + chunked = False + + # keep-alive + conn = headers.get(hdrs.CONNECTION) + if conn: + v = conn.lower() + if v == "close": + close_conn = True + elif v == "keep-alive": + close_conn = False + elif v == "upgrade": + upgrade = True + + # encoding + enc = headers.get(hdrs.CONTENT_ENCODING) + if enc: + enc = enc.lower() + if enc in ("gzip", "deflate", "br"): + encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) + if te is not None: + if "chunked" == te.lower(): + chunked = True + else: + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + + if hdrs.CONTENT_LENGTH in headers: + raise BadHttpMessage( + "Content-Length can't be present with Transfer-Encoding", + ) + + return (headers, raw_headers, close_conn, encoding, upgrade, chunked) + + def set_upgraded(self, val: bool) -> None: + """Set connection upgraded (to websocket) mode. + + :param bool val: new state. + """ + self._upgraded = val + + +class HttpRequestParser(HttpParser[RawRequestMessage]): + """Read request status line. + + Exception .http_exceptions.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. 
+ """ + + def parse_message(self, lines: List[bytes]) -> RawRequestMessage: + # request line + line = lines[0].decode("utf-8", "surrogateescape") + try: + method, path, version = line.split(None, 2) + except ValueError: + raise BadStatusLine(line) from None + + if len(path) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(path)) + ) + + # method + if not METHRE.match(method): + raise BadStatusLine(method) + + # version + try: + if version.startswith("HTTP/"): + n1, n2 = version[5:].split(".", 1) + version_o = HttpVersion(int(n1), int(n2)) + else: + raise BadStatusLine(version) + except Exception: + raise BadStatusLine(version) + + if method == "CONNECT": + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + url = URL.build(authority=path, encoded=True) + elif path.startswith("/"): + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + path_part, _hash_separator, url_fragment = path.partition("#") + path_part, _question_mark_separator, qs_part = path_part.partition("?") + + # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based + # NOTE: parser does, otherwise it results into the same + # NOTE: HTTP Request-Line input producing different + # NOTE: `yarl.URL()` objects + url = URL.build( + path=path_part, + query_string=qs_part, + fragment=url_fragment, + encoded=True, + ) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + url = URL(path, encoded=True) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: # then the headers weren't set in the request + if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + return RawRequestMessage( + method, + path, + version_o, + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + url, + ) + + +class HttpResponseParser(HttpParser[RawResponseMessage]): + """Read response status line and headers. + + BadStatusLine could be raised in case of any errors in status line. + Returns RawResponseMessage. 
+ """ + + def parse_message(self, lines: List[bytes]) -> RawResponseMessage: + line = lines[0].decode("utf-8", "surrogateescape") + try: + version, status = line.split(None, 1) + except ValueError: + raise BadStatusLine(line) from None + + try: + status, reason = status.split(None, 1) + except ValueError: + reason = "" + + if len(reason) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(reason)) + ) + + # version + match = VERSRE.match(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit number + try: + status_i = int(status) + except ValueError: + raise BadStatusLine(line) from None + + if status_i > 999: + raise BadStatusLine(line) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: + close = version_o <= HttpVersion10 + + return RawResponseMessage( + version_o, + status_i, + reason.strip(), + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) + + +class HttpPayloadParser: + def __init__( + self, + payload: StreamReader, + length: Optional[int] = None, + chunked: bool = False, + compression: Optional[str] = None, + code: Optional[int] = None, + method: Optional[str] = None, + readall: bool = False, + response_with_body: bool = True, + auto_decompress: bool = True, + ) -> None: + self._length = 0 + self._type = ParseState.PARSE_NONE + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + self._chunk_size = 0 + self._chunk_tail = b"" + self._auto_decompress = auto_decompress + self.done = False + + # payload decompression wrapper + if response_with_body and compression and self._auto_decompress: + real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( + payload, compression + ) + else: + real_payload = payload + + # payload parser + if not response_with_body: + # don't parse payload if it's not expected to be received + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + elif chunked: + self._type = ParseState.PARSE_CHUNKED + elif length is not None: + self._type = ParseState.PARSE_LENGTH + self._length = length + if self._length == 0: + real_payload.feed_eof() + self.done = True + else: + if readall and code != 204: + self._type = ParseState.PARSE_UNTIL_EOF + elif method in ("PUT", "POST"): + internal_logger.warning( # pragma: no cover + "Content-Length or Transfer-Encoding header is required" + ) + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + self.payload = real_payload + + def feed_eof(self) -> None: + if self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_eof() + elif self._type == ParseState.PARSE_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header." + ) + elif self._type == ParseState.PARSE_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header." 
+ ) + + def feed_data( + self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";" + ) -> Tuple[bool, bytes]: + # Read specified amount of bytes + if self._type == ParseState.PARSE_LENGTH: + required = self._length + chunk_len = len(chunk) + + if required >= chunk_len: + self._length = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + if self._length == 0: + self.payload.feed_eof() + return True, b"" + else: + self._length = 0 + self.payload.feed_data(chunk[:required], required) + self.payload.feed_eof() + return True, chunk[required:] + + # Chunked transfer encoding parser + elif self._type == ParseState.PARSE_CHUNKED: + if self._chunk_tail: + chunk = self._chunk_tail + chunk + self._chunk_tail = b"" + + while chunk: + + # read next chunk size + if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: + pos = chunk.find(SEP) + if pos >= 0: + i = chunk.find(CHUNK_EXT, 0, pos) + if i >= 0: + size_b = chunk[:i] # strip chunk-extensions + else: + size_b = chunk[:pos] + + try: + size = int(bytes(size_b), 16) + except ValueError: + exc = TransferEncodingError( + chunk[:pos].decode("ascii", "surrogateescape") + ) + self.payload.set_exception(exc) + raise exc from None + + chunk = chunk[pos + 2 :] + if size == 0: # eof marker + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK + self._chunk_size = size + self.payload.begin_http_chunk_receiving() + else: + self._chunk_tail = chunk + return False, b"" + + # read chunk and feed buffer + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: + required = self._chunk_size + chunk_len = len(chunk) + + if required > chunk_len: + self._chunk_size = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + return False, b"" + else: + self._chunk_size = 0 + self.payload.feed_data(chunk[:required], required) + chunk = chunk[required:] + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF + self.payload.end_http_chunk_receiving() + + # toss the CRLF at the end of the chunk + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if chunk[:2] == SEP: + chunk = chunk[2:] + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + else: + self._chunk_tail = chunk + return False, b"" + + # if stream does not contain trailer, after 0\r\n + # we should get another \r\n otherwise + # trailers needs to be skiped until \r\n\r\n + if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: + head = chunk[:2] + if head == SEP: + # end of stream + self.payload.feed_eof() + return True, chunk[2:] + # Both CR and LF, or only LF may not be received yet. It is + # expected that CRLF or LF will be shown at the very first + # byte next time, otherwise trailers should come. The last + # CRLF which marks the end of response might not be + # contained in the same TCP segment which delivered the + # size indicator. 
+ if not head: + return False, b"" + if head == SEP[:1]: + self._chunk_tail = head + return False, b"" + self._chunk = ChunkState.PARSE_TRAILERS + + # read and discard trailer up to the CRLF terminator + if self._chunk == ChunkState.PARSE_TRAILERS: + pos = chunk.find(SEP) + if pos >= 0: + chunk = chunk[pos + 2 :] + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk_tail = chunk + return False, b"" + + # Read all bytes until eof + elif self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_data(chunk, len(chunk)) + + return False, b"" + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + decompressor: Any + + def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + self.out = out + self.size = 0 + self.encoding = encoding + self._started_decoding = False + + if encoding == "br": + if not HAS_BROTLI: # pragma: no cover + raise ContentEncodingError( + "Can not decode content-encoding: brotli (br). " + "Please install `Brotli`" + ) + + class BrotliDecoder: + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + self._obj = brotli.Decompressor() + + def decompress(self, data: bytes) -> bytes: + if hasattr(self._obj, "decompress"): + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return cast(bytes, self._obj.flush()) + return b"" + + self.decompressor = BrotliDecoder() + else: + zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS + self.decompressor = zlib.decompressobj(wbits=zlib_mode) + + def set_exception(self, exc: BaseException) -> None: + self.out.set_exception(exc) + + def feed_data(self, chunk: bytes, size: int) -> None: + if not size: + return + + self.size += size + + # RFC1950 + # bits 0..3 = CM = 0b1000 = 8 = "deflate" + # bits 4..7 = CINFO = 1..7 = windows size. + if ( + not self._started_decoding + and self.encoding == "deflate" + and chunk[0] & 0xF != 8 + ): + # Change the decoder to decompress incorrectly compressed data + # Actually we should issue a warning about non-RFC-compliant data. 
+ self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS) + + try: + chunk = self.decompressor.decompress(chunk) + except Exception: + raise ContentEncodingError( + "Can not decode content-encoding: %s" % self.encoding + ) + + self._started_decoding = True + + if chunk: + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self) -> None: + chunk = self.decompressor.flush() + + if chunk or self.size > 0: + self.out.feed_data(chunk, len(chunk)) + if self.encoding == "deflate" and not self.decompressor.eof: + raise ContentEncodingError("deflate") + + self.out.feed_eof() + + def begin_http_chunk_receiving(self) -> None: + self.out.begin_http_chunk_receiving() + + def end_http_chunk_receiving(self) -> None: + self.out.end_http_chunk_receiving() + + +HttpRequestParserPy = HttpRequestParser +HttpResponseParserPy = HttpResponseParser +RawRequestMessagePy = RawRequestMessage +RawResponseMessagePy = RawResponseMessage + +try: + if not NO_EXTENSIONS: + from ._http_parser import ( # type: ignore[import,no-redef] + HttpRequestParser, + HttpResponseParser, + RawRequestMessage, + RawResponseMessage, + ) + + HttpRequestParserC = HttpRequestParser + HttpResponseParserC = HttpResponseParser + RawRequestMessageC = RawRequestMessage + RawResponseMessageC = RawResponseMessage +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.8/site-packages/aiohttp/http_websocket.py b/venv/lib/python3.8/site-packages/aiohttp/http_websocket.py new file mode 100644 index 0000000..2cfc519 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/http_websocket.py @@ -0,0 +1,701 @@ +"""WebSocket protocol versions 13 and 8.""" + +import asyncio +import collections +import json +import random +import re +import sys +import zlib +from enum import IntEnum +from struct import Struct +from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast + +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS +from .streams import DataQueue +from .typedefs import Final + +__all__ = ( + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + ABNORMAL_CLOSURE = 1006 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + BAD_GATEWAY = 1014 + + +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + + +class WSMsgType(IntEnum): + # websocket spec types + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xA + CLOSE = 0x8 + + # aiohttp specific types + CLOSING = 0x100 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closing = CLOSING + closed = CLOSED + error = ERROR + + +WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + +UNPACK_LEN2 = Struct("!H").unpack_from +UNPACK_LEN3 = Struct("!Q").unpack_from +UNPACK_CLOSE_CODE = Struct("!H").unpack +PACK_LEN1 = Struct("!BB").pack +PACK_LEN2 = Struct("!BBH").pack +PACK_LEN3 = Struct("!BBQ").pack +PACK_CLOSE_CODE = Struct("!H").pack +MSG_SIZE: Final[int] = 2**14 +DEFAULT_LIMIT: Final[int] = 2**16 + + +_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"]) + + +class WSMessage(_WSMessageBase): + def json(self, *, loads: Callable[[Any], Any] 
= json.loads) -> Any: + """Return parsed JSON data. + + .. versionadded:: 0.22 + """ + return loads(self.data) + + +WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) +WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code: int, message: str) -> None: + self.code = code + super().__init__(code, message) + + def __str__(self) -> str: + return cast(str, self.args[1]) + + +class WSHandshakeError(Exception): + """WebSocket protocol handshake error.""" + + +native_byteorder: Final[str] = sys.byteorder + + +# Used by _websocket_mask_python +_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)] + + +def _websocket_mask_python(mask: bytes, data: bytearray) -> None: + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytearray` + object of any length. The contents of `data` are masked with `mask`, + as specified in section 5.3 of RFC 6455. + + Note that this function mutates the `data` argument. + + This pure-python implementation may be replaced by an optimized + version when available. + + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + + if data: + a, b, c, d = (_XOR_TABLE[n] for n in mask) + data[::4] = data[::4].translate(a) + data[1::4] = data[1::4].translate(b) + data[2::4] = data[2::4].translate(c) + data[3::4] = data[3::4].translate(d) + + +if NO_EXTENSIONS: # pragma: no cover + _websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython # type: ignore[import] + + _websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + _websocket_mask = _websocket_mask_python + +_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) + + +_WS_EXT_RE: Final[Pattern[str]] = re.compile( + r"^(?:;\s*(?:" + r"(server_no_context_takeover)|" + r"(client_no_context_takeover)|" + r"(server_max_window_bits(?:=(\d+))?)|" + r"(client_max_window_bits(?:=(\d+))?)))*$" +) + +_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") + + +def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: + if not extstr: + return 0, False + + compress = 0 + notakeover = False + for ext in _WS_EXT_RE_SPLIT.finditer(extstr): + defext = ext.group(1) + # Return compress = 15 when get `permessage-deflate` + if not defext: + compress = 15 + break + match = _WS_EXT_RE.match(defext) + if match: + compress = 15 + if isserver: + # Server never fail to detect compress handshake. 
+ # Server does not need to send max wbit to client + if match.group(4): + compress = int(match.group(4)) + # Group3 must match if group4 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # CONTINUE to next extension + if compress > 15 or compress < 9: + compress = 0 + continue + if match.group(1): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + else: + if match.group(6): + compress = int(match.group(6)) + # Group5 must match if group6 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # FAIL the parse progress + if compress > 15 or compress < 9: + raise WSHandshakeError("Invalid window size") + if match.group(2): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + # Return Fail if client side and not match + elif not isserver: + raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) + + return compress, notakeover + + +def ws_ext_gen( + compress: int = 15, isserver: bool = False, server_notakeover: bool = False +) -> str: + # client_notakeover=False not used for server + # compress wbit 8 does not support in zlib + if compress < 9 or compress > 15: + raise ValueError( + "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" + ) + enabledext = ["permessage-deflate"] + if not isserver: + enabledext.append("client_max_window_bits") + + if compress < 15: + enabledext.append("server_max_window_bits=" + str(compress)) + if server_notakeover: + enabledext.append("server_no_context_takeover") + # if client_notakeover: + # enabledext.append('client_no_context_takeover') + return "; ".join(enabledext) + + +class WSParserState(IntEnum): + READ_HEADER = 1 + READ_PAYLOAD_LENGTH = 2 + READ_PAYLOAD_MASK = 3 + READ_PAYLOAD = 4 + + +class WebSocketReader: + def __init__( + self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True + ) -> None: + self.queue = queue + self._max_msg_size = max_msg_size + + self._exc: Optional[BaseException] = None + self._partial = bytearray() + self._state = WSParserState.READ_HEADER + + self._opcode: Optional[int] = None + self._frame_fin = False + self._frame_opcode: Optional[int] = None + self._frame_payload = bytearray() + + self._tail = b"" + self._has_mask = False + self._frame_mask: Optional[bytes] = None + self._payload_length = 0 + self._payload_length_flag = 0 + self._compressed: Optional[bool] = None + self._decompressobj: Any = None # zlib.decompressobj actually + self._compress = compress + + def feed_eof(self) -> None: + self.queue.feed_eof() + + def feed_data(self, data: bytes) -> Tuple[bool, bytes]: + if self._exc: + return True, data + + try: + return self._feed_data(data) + except Exception as exc: + self._exc = exc + self.queue.set_exception(exc) + return True, b"" + + def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: + for fin, opcode, payload, compressed in self.parse_frame(data): + if compressed and not self._decompressobj: + self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS) + if opcode == WSMsgType.CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close code: {close_code}", + ) + try: + close_message = payload[2:].decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + msg = 
WSMessage(WSMsgType.CLOSE, close_code, close_message) + elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close frame: {fin} {opcode} {payload!r}", + ) + else: + msg = WSMessage(WSMsgType.CLOSE, 0, "") + + self.queue.feed_data(msg, 0) + + elif opcode == WSMsgType.PING: + self.queue.feed_data( + WSMessage(WSMsgType.PING, payload, ""), len(payload) + ) + + elif opcode == WSMsgType.PONG: + self.queue.feed_data( + WSMessage(WSMsgType.PONG, payload, ""), len(payload) + ) + + elif ( + opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) + and self._opcode is None + ): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + ) + else: + # load text/binary + if not fin: + # got partial frame payload + if opcode != WSMsgType.CONTINUATION: + self._opcode = opcode + self._partial.extend(payload) + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + else: + # previous frame was non finished + # we should get continuation opcode + if self._partial: + if opcode != WSMsgType.CONTINUATION: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if opcode == WSMsgType.CONTINUATION: + assert self._opcode is not None + opcode = self._opcode + self._opcode = None + + self._partial.extend(payload) + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. 
+ if compressed: + self._partial.extend(_WS_DEFLATE_TRAILING) + payload_merged = self._decompressobj.decompress( + self._partial, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(self._partial) + + self._partial.clear() + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + self.queue.feed_data( + WSMessage(WSMsgType.TEXT, text, ""), len(text) + ) + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + else: + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), + len(payload_merged), + ) + + return False, b"" + + def parse_frame( + self, buf: bytes + ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + """Return the next frame from the socket.""" + frames = [] + if self._tail: + buf, self._tail = self._tail + buf, b"" + + start_pos = 0 + buf_length = len(buf) + + while True: + # read header + if self._state == WSParserState.READ_HEADER: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) + + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH + else: + break + + # read payload length + if self._state == WSParserState.READ_PAYLOAD_LENGTH: + length = self._payload_length_flag + if length == 126: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + elif length > 126: + if buf_length - start_pos >= 8: + data = 
buf[start_pos : start_pos + 8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + else: + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + + # read payload mask + if self._state == WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos >= 4: + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD + else: + break + + if self._state == WSParserState.READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload.extend(buf[start_pos:]) + start_pos = buf_length + else: + self._payload_length = 0 + payload.extend(buf[start_pos : start_pos + length]) + start_pos = start_pos + length + + if self._payload_length == 0: + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) + + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER + else: + break + + self._tail = buf[start_pos:] + + return frames + + +class WebSocketWriter: + def __init__( + self, + protocol: BaseProtocol, + transport: asyncio.Transport, + *, + use_mask: bool = False, + limit: int = DEFAULT_LIMIT, + random: Any = random.Random(), + compress: int = 0, + notakeover: bool = False, + ) -> None: + self.protocol = protocol + self.transport = transport + self.use_mask = use_mask + self.randrange = random.randrange + self.compress = compress + self.notakeover = notakeover + self._closing = False + self._limit = limit + self._output_size = 0 + self._compressobj: Any = None # actually compressobj + + async def _send_frame( + self, message: bytes, opcode: int, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if self._closing and not (opcode & WSMsgType.CLOSE): + raise ConnectionResetError("Cannot write to closing transport") + + rsv = 0 + + # Only compress larger packets (disabled) + # Does small packet needs to be compressed? 
+ # if self.compress and opcode < 8 and len(message) > 124: + if (compress or self.compress) and opcode < 8: + if compress: + # Do not set self._compress if compressing is for this frame + compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress) + else: # self.compress + if not self._compressobj: + self._compressobj = zlib.compressobj( + level=zlib.Z_BEST_SPEED, wbits=-self.compress + ) + compressobj = self._compressobj + + message = compressobj.compress(message) + message = message + compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ) + if message.endswith(_WS_DEFLATE_TRAILING): + message = message[:-4] + rsv = rsv | 0x40 + + msg_length = len(message) + + use_mask = self.use_mask + if use_mask: + mask_bit = 0x80 + else: + mask_bit = 0 + + if msg_length < 126: + header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + elif msg_length < (1 << 16): + header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + else: + header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + if use_mask: + mask = self.randrange(0, 0xFFFFFFFF) + mask = mask.to_bytes(4, "big") + message = bytearray(message) + _websocket_mask(mask, message) + self._write(header + mask + message) + self._output_size += len(header) + len(mask) + len(message) + else: + if len(message) > MSG_SIZE: + self._write(header) + self._write(message) + else: + self._write(header + message) + + self._output_size += len(header) + len(message) + + if self._output_size > self._limit: + self._output_size = 0 + await self.protocol._drain_helper() + + def _write(self, data: bytes) -> None: + if self.transport is None or self.transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + self.transport.write(data) + + async def pong(self, message: bytes = b"") -> None: + """Send pong message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PONG) + + async def ping(self, message: bytes = b"") -> None: + """Send ping message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PING) + + async def send( + self, + message: Union[str, bytes], + binary: bool = False, + compress: Optional[int] = None, + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if isinstance(message, str): + message = message.encode("utf-8") + if binary: + await self._send_frame(message, WSMsgType.BINARY, compress) + else: + await self._send_frame(message, WSMsgType.TEXT, compress) + + async def close(self, code: int = 1000, message: bytes = b"") -> None: + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode("utf-8") + try: + await self._send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE + ) + finally: + self._closing = True diff --git a/venv/lib/python3.8/site-packages/aiohttp/http_writer.py b/venv/lib/python3.8/site-packages/aiohttp/http_writer.py new file mode 100644 index 0000000..db3d6a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/http_writer.py @@ -0,0 +1,200 @@ +"""Http related parsers and protocol.""" + +import asyncio +import zlib +from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa + +from multidict import CIMultiDict + +from .abc import AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS + +__all__ = ("StreamWriter", 
"HttpVersion", "HttpVersion10", "HttpVersion11") + + +class HttpVersion(NamedTuple): + major: int + minor: int + + +HttpVersion10 = HttpVersion(1, 0) +HttpVersion11 = HttpVersion(1, 1) + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] +_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]] + + +class StreamWriter(AbstractStreamWriter): + def __init__( + self, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, + on_chunk_sent: _T_OnChunkSent = None, + on_headers_sent: _T_OnHeadersSent = None, + ) -> None: + self._protocol = protocol + self._transport = protocol.transport + + self.loop = loop + self.length = None + self.chunked = False + self.buffer_size = 0 + self.output_size = 0 + + self._eof = False + self._compress: Any = None + self._drain_waiter = None + + self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent + self._on_headers_sent: _T_OnHeadersSent = on_headers_sent + + @property + def transport(self) -> Optional[asyncio.Transport]: + return self._transport + + @property + def protocol(self) -> BaseProtocol: + return self._protocol + + def enable_chunking(self) -> None: + self.chunked = True + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS + self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy) + + def _write(self, chunk: bytes) -> None: + size = len(chunk) + self.buffer_size += size + self.output_size += size + + if self._transport is None or self._transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + self._transport.write(chunk) + + async def write( + self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 + ) -> None: + """Writes chunk of data to a stream. + + write_eof() indicates end of stream. + writer can't be used after write_eof() method being called. + write() return drain future. 
+ """ + if self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if isinstance(chunk, memoryview): + if chunk.nbytes != len(chunk): + # just reshape it + chunk = chunk.cast("c") + + if self._compress is not None: + chunk = self._compress.compress(chunk) + if not chunk: + return + + if self.length is not None: + chunk_len = len(chunk) + if self.length >= chunk_len: + self.length = self.length - chunk_len + else: + chunk = chunk[: self.length] + self.length = 0 + if not chunk: + return + + if chunk: + if self.chunked: + chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len_pre + chunk + b"\r\n" + + self._write(chunk) + + if self.buffer_size > LIMIT and drain: + self.buffer_size = 0 + await self.drain() + + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write request/response status and headers.""" + if self._on_headers_sent is not None: + await self._on_headers_sent(headers) + + # status + headers + buf = _serialize_headers(status_line, headers) + self._write(buf) + + async def write_eof(self, chunk: bytes = b"") -> None: + if self._eof: + return + + if chunk and self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if self._compress: + if chunk: + chunk = self._compress.compress(chunk) + + chunk = chunk + self._compress.flush() + if chunk and self.chunked: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + if self.chunked: + if chunk: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + chunk = b"0\r\n\r\n" + + if chunk: + self._write(chunk) + + await self.drain() + + self._eof = True + self._transport = None + + async def drain(self) -> None: + """Flush the write buffer. + + The intended use is to write + + await w.write(data) + await w.drain() + """ + if self._protocol.transport is not None: + await self._protocol._drain_helper() + + +def _safe_header(string: str) -> str: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." + ) + return string + + +def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: + headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) + line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" + return line.encode("utf-8") + + +_serialize_headers = _py_serialize_headers + +try: + import aiohttp._http_writer as _http_writer # type: ignore[import] + + _c_serialize_headers = _http_writer._serialize_headers + if not NO_EXTENSIONS: + _serialize_headers = _c_serialize_headers +except ImportError: + pass diff --git a/venv/lib/python3.8/site-packages/aiohttp/locks.py b/venv/lib/python3.8/site-packages/aiohttp/locks.py new file mode 100644 index 0000000..de2dc83 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/locks.py @@ -0,0 +1,41 @@ +import asyncio +import collections +from typing import Any, Deque, Optional + + +class EventResultOrError: + """Event asyncio lock helper class. + + Wraps the Event asyncio lock allowing either to awake the + locked Tasks without any error or raising an exception. + + thanks to @vorpalsmith for the simple design. 
+ """ + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._exc: Optional[BaseException] = None + self._event = asyncio.Event() + self._waiters: Deque[asyncio.Future[Any]] = collections.deque() + + def set(self, exc: Optional[BaseException] = None) -> None: + self._exc = exc + self._event.set() + + async def wait(self) -> Any: + waiter = self._loop.create_task(self._event.wait()) + self._waiters.append(waiter) + try: + val = await waiter + finally: + self._waiters.remove(waiter) + + if self._exc is not None: + raise self._exc + + return val + + def cancel(self) -> None: + """Cancel all waiters""" + for waiter in self._waiters: + waiter.cancel() diff --git a/venv/lib/python3.8/site-packages/aiohttp/log.py b/venv/lib/python3.8/site-packages/aiohttp/log.py new file mode 120000 index 0000000..87eb248 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/log.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/05/b3/4a/c7d7b754c226d31623648d087010684bbc2375e21e49a88913bfaa2b68 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/multipart.py b/venv/lib/python3.8/site-packages/aiohttp/multipart.py new file mode 100644 index 0000000..73801f4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/multipart.py @@ -0,0 +1,961 @@ +import base64 +import binascii +import json +import re +import uuid +import warnings +import zlib +from collections import deque +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Deque, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import parse_qsl, unquote, urlencode + +from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping + +from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, + CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, + CONTENT_TYPE, +) +from .helpers import CHAR, TOKEN, parse_mimetype, reify +from .http import HeadersParser +from .payload import ( + JsonPayload, + LookupError, + Order, + Payload, + StringPayload, + get_payload, + payload_type, +) +from .streams import StreamReader + +__all__ = ( + "MultipartReader", + "MultipartWriter", + "BodyPartReader", + "BadContentDispositionHeader", + "BadContentDispositionParam", + "parse_content_disposition", + "content_disposition_filename", +) + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import ClientResponse + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition( + header: Optional[str], +) -> Tuple[Optional[str], Dict[str, str]]: + def is_token(string: str) -> bool: + return bool(string) and TOKEN >= set(string) + + def is_quoted(string: str) -> bool: + return string[0] == string[-1] == '"' + + def is_rfc5987(string: str) -> bool: + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string: str) -> bool: + return string.endswith("*") + + def is_continuous_param(string: str) -> bool: + pos = string.find("*") + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith("*") else string[pos:] + return substring.isdigit() + + def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: + return re.sub(f"\\\\([{chars}])", "\\1", text) + + if not header: + return None, {} + + disptype, *parts = header.split(";") + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params: 
Dict[str, str] = {} + while parts: + item = parts.pop(0) + + if "=" not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split("=", 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, "strict") + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + failed = True + if is_quoted(value): + failed = False + value = unescape(value[1:-1].lstrip("\\/")) + elif is_token(value): + failed = False + elif parts: + # maybe just ; in filename, in any case this is just + # one case fix, for proper fix we need to redesign parser + _value = f"{value};{parts[0]}" + if is_quoted(_value): + parts.pop(0) + value = unescape(_value[1:-1].lstrip("\\/")) + failed = False + + if failed: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename( + params: Mapping[str, str], name: str = "filename" +) -> Optional[str]: + name_suf = "%s*" % name + if not params: + return None + elif name_suf in params: + return params[name_suf] + elif name in params: + return params[name] + else: + parts = [] + fnparams = sorted( + (key, value) for key, value in params.items() if key.startswith(name_suf) + ) + for num, (key, value) in enumerate(fnparams): + _, tail = key.split("*", 1) + if tail.endswith("*"): + tail = tail[:-1] + if tail == str(num): + parts.append(value) + else: + break + if not parts: + return None + value = "".join(parts) + if "'" in value: + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + return unquote(value, encoding, "strict") + return value + + +class MultipartResponseWrapper: + """Wrapper around the MultipartReader. + + It takes care about + underlying connection and close it when it needs in. + """ + + def __init__( + self, + resp: "ClientResponse", + stream: "MultipartReader", + ) -> None: + self.resp = resp + self.stream = stream + + def __aiter__(self) -> "MultipartResponseWrapper": + return self + + async def __anext__( + self, + ) -> Union["MultipartReader", "BodyPartReader"]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + def at_eof(self) -> bool: + """Returns True when all response data had been read.""" + return self.resp.content.at_eof() + + async def next( + self, + ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: + """Emits next multipart reader object.""" + item = await self.stream.next() + if self.stream.at_eof(): + await self.release() + return item + + async def release(self) -> None: + """Release the connection gracefully. + + All remaining content is read to the void. 
+ """ + await self.resp.release() + + +class BodyPartReader: + """Multipart reader for single body part.""" + + chunk_size = 8192 + + def __init__( + self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader + ) -> None: + self.headers = headers + self._boundary = boundary + self._content = content + self._at_eof = False + length = self.headers.get(CONTENT_LENGTH, None) + self._length = int(length) if length is not None else None + self._read_bytes = 0 + # TODO: typeing.Deque is not supported by Python 3.5 + self._unread: Deque[bytes] = deque() + self._prev_chunk: Optional[bytes] = None + self._content_eof = 0 + self._cache: Dict[str, Any] = {} + + def __aiter__(self) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__(self) -> bytes: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + async def next(self) -> Optional[bytes]: + item = await self.read() + if not item: + return None + return item + + async def read(self, *, decode: bool = False) -> bytes: + """Reads body part data. + + decode: Decodes data following by encoding + method from Content-Encoding header. If it missed + data remains untouched + """ + if self._at_eof: + return b"" + data = bytearray() + while not self._at_eof: + data.extend(await self.read_chunk(self.chunk_size)) + if decode: + return self.decode(data) + return data + + async def read_chunk(self, size: int = chunk_size) -> bytes: + """Reads body part content chunk of the specified size. + + size: chunk size + """ + if self._at_eof: + return b"" + if self._length: + chunk = await self._read_chunk_from_length(size) + else: + chunk = await self._read_chunk_from_stream(size) + + self._read_bytes += len(chunk) + if self._read_bytes == self._length: + self._at_eof = True + if self._at_eof: + clrf = await self._content.readline() + assert ( + b"\r\n" == clrf + ), "reader did not read all the data or it is malformed" + return chunk + + async def _read_chunk_from_length(self, size: int) -> bytes: + # Reads body part content chunk of the specified size. + # The body part must has Content-Length header with proper value. + assert self._length is not None, "Content-Length required for chunked read" + chunk_size = min(size, self._length - self._read_bytes) + chunk = await self._content.read(chunk_size) + return chunk + + async def _read_chunk_from_stream(self, size: int) -> bytes: + # Reads content chunk of body part with unknown length. + # The Content-Length header for body part is not necessary. 
+ assert ( + size >= len(self._boundary) + 2 + ), "Chunk size must be greater or equal than boundary length + 2" + first_chunk = self._prev_chunk is None + if first_chunk: + self._prev_chunk = await self._content.read(size) + + chunk = await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + assert self._prev_chunk is not None + window = self._prev_chunk + chunk + sub = b"\r\n" + self._boundary + if first_chunk: + idx = window.find(sub) + else: + idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) + if idx >= 0: + # pushing boundary back to content + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + self._content.unread_data(window[idx:]) + if size > idx: + self._prev_chunk = self._prev_chunk[:idx] + chunk = window[len(self._prev_chunk) : idx] + if not chunk: + self._at_eof = True + result = self._prev_chunk + self._prev_chunk = chunk + return result + + async def readline(self) -> bytes: + """Reads body part by line by line.""" + if self._at_eof: + return b"" + + if self._unread: + line = self._unread.popleft() + else: + line = await self._content.readline() + + if line.startswith(self._boundary): + # the very last boundary may not come with \r\n, + # so set single rules for everyone + sline = line.rstrip(b"\r\n") + boundary = self._boundary + last_boundary = self._boundary + b"--" + # ensure that we read exactly the boundary, not something alike + if sline == boundary or sline == last_boundary: + self._at_eof = True + self._unread.append(line) + return b"" + else: + next_line = await self._content.readline() + if next_line.startswith(self._boundary): + line = line[:-2] # strip CRLF but only once + self._unread.append(next_line) + + return line + + async def release(self) -> None: + """Like read(), but reads all the data to the void.""" + if self._at_eof: + return + while not self._at_eof: + await self.read_chunk(self.chunk_size) + + async def text(self, *, encoding: Optional[str] = None) -> str: + """Like read(), but assumes that body part contains text data.""" + data = await self.read(decode=True) + # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA + # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA + encoding = encoding or self.get_charset(default="utf-8") + return data.decode(encoding) + + async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: + """Like read(), but assumes that body parts contains JSON data.""" + data = await self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default="utf-8") + return cast(Dict[str, Any], json.loads(data.decode(encoding))) + + async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: + """Like read(), but assumes that body parts contain form urlencoded data.""" + data = await self.read(decode=True) + if not data: + return [] + if encoding is not None: + real_encoding = encoding + else: + real_encoding = self.get_charset(default="utf-8") + return parse_qsl( + data.rstrip().decode(real_encoding), + keep_blank_values=True, + encoding=real_encoding, + ) + + def at_eof(self) -> bool: + """Returns True if the boundary was reached or False otherwise.""" + return self._at_eof + + def decode(self, data: bytes) -> bytes: + """Decodes data. + + Decoding is done according the specified Content-Encoding + or Content-Transfer-Encoding headers value. 
+ """ + if CONTENT_TRANSFER_ENCODING in self.headers: + data = self._decode_content_transfer(data) + if CONTENT_ENCODING in self.headers: + return self._decode_content(data) + return data + + def _decode_content(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_ENCODING, "").lower() + + if encoding == "deflate": + return zlib.decompress(data, -zlib.MAX_WBITS) + elif encoding == "gzip": + return zlib.decompress(data, 16 + zlib.MAX_WBITS) + elif encoding == "identity": + return data + else: + raise RuntimeError(f"unknown content encoding: {encoding}") + + def _decode_content_transfer(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + + if encoding == "base64": + return base64.b64decode(data) + elif encoding == "quoted-printable": + return binascii.a2b_qp(data) + elif encoding in ("binary", "8bit", "7bit"): + return data + else: + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(encoding) + ) + + def get_charset(self, default: str) -> str: + """Returns charset parameter from Content-Type header or default.""" + ctype = self.headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + return mimetype.parameters.get("charset", default) + + @reify + def name(self) -> Optional[str]: + """Returns name specified in Content-Disposition header. + + If the header is missing or malformed, returns None. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "name") + + @reify + def filename(self) -> Optional[str]: + """Returns filename specified in Content-Disposition header. + + Returns None if the header is missing or malformed. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "filename") + + +@payload_type(BodyPartReader, order=Order.try_first) +class BodyPartReaderPayload(Payload): + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value, *args, **kwargs) + + params: Dict[str, str] = {} + if value.name is not None: + params["name"] = value.name + if value.filename is not None: + params["filename"] = value.filename + + if params: + self.set_content_disposition("attachment", True, **params) + + async def write(self, writer: Any) -> None: + field = self._value + chunk = await field.read_chunk(size=2**16) + while chunk: + await writer.write(field.decode(chunk)) + chunk = await field.read_chunk(size=2**16) + + +class MultipartReader: + """Multipart body reader.""" + + #: Response wrapper, used when multipart readers constructs from response. + response_wrapper_cls = MultipartResponseWrapper + #: Multipart reader class, used to handle multipart/* body parts. + #: None points to type(self) + multipart_reader_cls = None + #: Body part reader class for non multipart/* content types. 
+ part_reader_cls = BodyPartReader + + def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self.headers = headers + self._boundary = ("--" + self._get_boundary()).encode() + self._content = content + self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None + self._at_eof = False + self._at_bof = True + self._unread: List[bytes] = [] + + def __aiter__( + self, + ) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + @classmethod + def from_response( + cls, + response: "ClientResponse", + ) -> MultipartResponseWrapper: + """Constructs reader instance from HTTP response. + + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls( + response, cls(response.headers, response.content) + ) + return obj + + def at_eof(self) -> bool: + """Returns True if the final boundary was reached, false otherwise.""" + return self._at_eof + + async def next( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return None + await self._maybe_release_last_part() + if self._at_bof: + await self._read_until_first_boundary() + self._at_bof = False + else: + await self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return None + self._last_part = await self.fetch_next_part() + return self._last_part + + async def release(self) -> None: + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = await self.next() + if item is None: + break + await item.release() + + async def fetch_next_part( + self, + ) -> Union["MultipartReader", BodyPartReader]: + """Returns the next body part reader.""" + headers = await self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader( + self, + headers: "CIMultiDictProxy[str]", + ) -> Union["MultipartReader", BodyPartReader]: + """Dispatches the response by the `Content-Type` header. + + Returns a suitable reader instance. 
+ + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + + if mimetype.type == "multipart": + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls(self._boundary, headers, self._content) + + def _get_boundary(self) -> str: + mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) + + assert mimetype.type == "multipart", "multipart/* content type expected" + + if "boundary" not in mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + ) + + boundary = mimetype.parameters["boundary"] + if len(boundary) > 70: + raise ValueError("boundary %r is too long (70 chars max)" % boundary) + + return boundary + + async def _readline(self) -> bytes: + if self._unread: + return self._unread.pop() + return await self._content.readline() + + async def _read_until_first_boundary(self) -> None: + while True: + chunk = await self._readline() + if chunk == b"": + raise ValueError( + "Could not find starting boundary %r" % (self._boundary) + ) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b"--": + self._at_eof = True + return + + async def _read_boundary(self) -> None: + chunk = (await self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b"--": + self._at_eof = True + epilogue = await self._readline() + next_line = await self._readline() + + # the epilogue is expected and then either the end of input or the + # parent multipart boundary, if the parent boundary is found then + # it should be marked as unread and handed to the parent for + # processing + if next_line[:2] == b"--": + self._unread.append(next_line) + # otherwise the request is likely missing an epilogue and both + # lines should be passed to the parent for processing + # (this handles the old behavior gracefully) + else: + self._unread.extend([next_line, epilogue]) + else: + raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") + + async def _read_headers(self) -> "CIMultiDictProxy[str]": + lines = [b""] + while True: + chunk = await self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HeadersParser() + headers, raw_headers = parser.parse_headers(lines) + return headers + + async def _maybe_release_last_part(self) -> None: + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + await self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None + + +_Part = Tuple[Payload, str, str] + + +class MultipartWriter(Payload): + """Multipart body writer.""" + + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: + boundary = boundary if boundary is not None else uuid.uuid4().hex + # The underlying Payload API demands a str (utf-8), not bytes, + # so we need to ensure we don't lose anything during conversion. + # As a result, require the boundary to be ASCII only. + # In both situations. 
+ + try: + self._boundary = boundary.encode("ascii") + except UnicodeEncodeError: + raise ValueError("boundary should contain ASCII only chars") from None + ctype = f"multipart/{subtype}; boundary={self._boundary_value}" + + super().__init__(None, content_type=ctype) + + self._parts: List[_Part] = [] + + def __enter__(self) -> "MultipartWriter": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def __iter__(self) -> Iterator[_Part]: + return iter(self._parts) + + def __len__(self) -> int: + return len(self._parts) + + def __bool__(self) -> bool: + return True + + _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") + _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") + + @property + def _boundary_value(self) -> str: + """Wrap boundary parameter value in quotes, if necessary. + + Reads self.boundary and returns a unicode sting. + """ + # Refer to RFCs 7231, 7230, 5234. + # + # parameter = token "=" ( token / quoted-string ) + # token = 1*tchar + # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + # obs-text = %x80-FF + # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + # / DIGIT / ALPHA + # ; any VCHAR, except delimiters + # VCHAR = %x21-7E + value = self._boundary + if re.match(self._valid_tchar_regex, value): + return value.decode("ascii") # cannot fail + + if re.search(self._invalid_qdtext_char_regex, value): + raise ValueError("boundary value contains invalid characters") + + # escape %x5C and %x22 + quoted_value_content = value.replace(b"\\", b"\\\\") + quoted_value_content = quoted_value_content.replace(b'"', b'\\"') + + return '"' + quoted_value_content.decode("ascii") + '"' + + @property + def boundary(self) -> str: + return self._boundary.decode("ascii") + + def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Payload): + obj.headers.update(headers) + return self.append_payload(obj) + else: + try: + payload = get_payload(obj, headers=headers) + except LookupError: + raise TypeError("Cannot create payload from %r" % obj) + else: + return self.append_payload(payload) + + def append_payload(self, payload: Payload) -> Payload: + """Adds a new body part to multipart writer.""" + # compression + encoding: Optional[str] = payload.headers.get( + CONTENT_ENCODING, + "", + ).lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding: Optional[str] = payload.headers.get( + CONTENT_TRANSFER_ENCODING, + "", + ).lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(te_encoding) + ) + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) + + self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] + return payload + + def append_json( + self, obj: Any, headers: Optional[MultiMapping[str]] = None + ) -> Payload: + """Helper to append JSON part.""" + if headers is None: + 
headers = CIMultiDict() + + return self.append_payload(JsonPayload(obj, headers=headers)) + + def append_form( + self, + obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], + headers: Optional[MultiMapping[str]] = None, + ) -> Payload: + """Helper to append form urlencoded part.""" + assert isinstance(obj, (Sequence, Mapping)) + + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Mapping): + obj = list(obj.items()) + data = urlencode(obj, doseq=True) + + return self.append_payload( + StringPayload( + data, headers=headers, content_type="application/x-www-form-urlencoded" + ) + ) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + total = 0 + for part, encoding, te_encoding in self._parts: + if encoding or te_encoding or part.size is None: + return None + + total += int( + 2 + + len(self._boundary) + + 2 + + part.size # b'--'+self._boundary+b'\r\n' + + len(part._binary_headers) + + 2 # b'\r\n' + ) + + total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' + return total + + async def write(self, writer: Any, close_boundary: bool = True) -> None: + """Write body.""" + for part, encoding, te_encoding in self._parts: + await writer.write(b"--" + self._boundary + b"\r\n") + await writer.write(part._binary_headers) + + if encoding or te_encoding: + w = MultipartPayloadWriter(writer) + if encoding: + w.enable_compression(encoding) + if te_encoding: + w.enable_encoding(te_encoding) + await part.write(w) # type: ignore[arg-type] + await w.write_eof() + else: + await part.write(writer) + + await writer.write(b"\r\n") + + if close_boundary: + await writer.write(b"--" + self._boundary + b"--\r\n") + + +class MultipartPayloadWriter: + def __init__(self, writer: Any) -> None: + self._writer = writer + self._encoding: Optional[str] = None + self._compress: Any = None + self._encoding_buffer: Optional[bytearray] = None + + def enable_encoding(self, encoding: str) -> None: + if encoding == "base64": + self._encoding = encoding + self._encoding_buffer = bytearray() + elif encoding == "quoted-printable": + self._encoding = "quoted-printable" + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS + self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy) + + async def write_eof(self) -> None: + if self._compress is not None: + chunk = self._compress.flush() + if chunk: + self._compress = None + await self.write(chunk) + + if self._encoding == "base64": + if self._encoding_buffer: + await self._writer.write(base64.b64encode(self._encoding_buffer)) + + async def write(self, chunk: bytes) -> None: + if self._compress is not None: + if chunk: + chunk = self._compress.compress(chunk) + if not chunk: + return + + if self._encoding == "base64": + buf = self._encoding_buffer + assert buf is not None + buf.extend(chunk) + + if buf: + div, mod = divmod(len(buf), 3) + enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) + if enc_chunk: + b64chunk = base64.b64encode(enc_chunk) + await self._writer.write(b64chunk) + elif self._encoding == "quoted-printable": + await self._writer.write(binascii.b2a_qp(chunk)) + else: + await self._writer.write(chunk) diff --git a/venv/lib/python3.8/site-packages/aiohttp/payload.py b/venv/lib/python3.8/site-packages/aiohttp/payload.py new file mode 100644 index 0000000..625b2ea --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/payload.py @@ -0,0 
+1,465 @@ +import asyncio +import enum +import io +import json +import mimetypes +import os +import warnings +from abc import ABC, abstractmethod +from itertools import chain +from typing import ( + IO, + TYPE_CHECKING, + Any, + ByteString, + Dict, + Iterable, + Optional, + TextIO, + Tuple, + Type, + Union, +) + +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + PY_36, + content_disposition_header, + guess_filename, + parse_mimetype, + sentinel, +) +from .streams import StreamReader +from .typedefs import Final, JSONEncoder, _CIMultiDict + +__all__ = ( + "PAYLOAD_REGISTRY", + "get_payload", + "payload_type", + "Payload", + "BytesPayload", + "StringPayload", + "IOBasePayload", + "BytesIOPayload", + "BufferedReaderPayload", + "TextIOPayload", + "StringIOPayload", + "JsonPayload", + "AsyncIterablePayload", +) + +TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB + +if TYPE_CHECKING: # pragma: no cover + from typing import List + + +class LookupError(Exception): + pass + + +class Order(str, enum.Enum): + normal = "normal" + try_first = "try_first" + try_last = "try_last" + + +def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": + return PAYLOAD_REGISTRY.get(data, *args, **kwargs) + + +def register_payload( + factory: Type["Payload"], type: Any, *, order: Order = Order.normal +) -> None: + PAYLOAD_REGISTRY.register(factory, type, order=order) + + +class payload_type: + def __init__(self, type: Any, *, order: Order = Order.normal) -> None: + self.type = type + self.order = order + + def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: + register_payload(factory, self.type, order=self.order) + return factory + + +PayloadType = Type["Payload"] +_PayloadRegistryItem = Tuple[PayloadType, Any] + + +class PayloadRegistry: + """Payload registry. 
+ + note: we need zope.interface for more efficient adapter search + """ + + def __init__(self) -> None: + self._first: List[_PayloadRegistryItem] = [] + self._normal: List[_PayloadRegistryItem] = [] + self._last: List[_PayloadRegistryItem] = [] + + def get( + self, + data: Any, + *args: Any, + _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + **kwargs: Any, + ) -> "Payload": + if isinstance(data, Payload): + return data + for factory, type in _CHAIN(self._first, self._normal, self._last): + if isinstance(data, type): + return factory(data, *args, **kwargs) + + raise LookupError() + + def register( + self, factory: PayloadType, type: Any, *, order: Order = Order.normal + ) -> None: + if order is Order.try_first: + self._first.append((factory, type)) + elif order is Order.normal: + self._normal.append((factory, type)) + elif order is Order.try_last: + self._last.append((factory, type)) + else: + raise ValueError(f"Unsupported order {order!r}") + + +class Payload(ABC): + + _default_content_type: str = "application/octet-stream" + _size: Optional[int] = None + + def __init__( + self, + value: Any, + headers: Optional[ + Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] + ] = None, + content_type: Optional[str] = sentinel, + filename: Optional[str] = None, + encoding: Optional[str] = None, + **kwargs: Any, + ) -> None: + self._encoding = encoding + self._filename = filename + self._headers: _CIMultiDict = CIMultiDict() + self._value = value + if content_type is not sentinel and content_type is not None: + self._headers[hdrs.CONTENT_TYPE] = content_type + elif self._filename is not None: + content_type = mimetypes.guess_type(self._filename)[0] + if content_type is None: + content_type = self._default_content_type + self._headers[hdrs.CONTENT_TYPE] = content_type + else: + self._headers[hdrs.CONTENT_TYPE] = self._default_content_type + self._headers.update(headers or {}) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + return self._size + + @property + def filename(self) -> Optional[str]: + """Filename of the payload.""" + return self._filename + + @property + def headers(self) -> _CIMultiDict: + """Custom item headers""" + return self._headers + + @property + def _binary_headers(self) -> bytes: + return ( + "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( + "utf-8" + ) + + b"\r\n" + ) + + @property + def encoding(self) -> Optional[str]: + """Payload encoding""" + return self._encoding + + @property + def content_type(self) -> str: + """Content type""" + return self._headers[hdrs.CONTENT_TYPE] + + def set_content_disposition( + self, + disptype: str, + quote_fields: bool = True, + _charset: str = "utf-8", + **params: Any, + ) -> None: + """Sets ``Content-Disposition`` header.""" + self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( + disptype, quote_fields=quote_fields, _charset=_charset, **params + ) + + @abstractmethod + async def write(self, writer: AbstractStreamWriter) -> None: + """Write payload. 
+ + writer is an AbstractStreamWriter instance: + """ + + +class BytesPayload(Payload): + def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, (bytes, bytearray, memoryview)): + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + if isinstance(value, memoryview): + self._size = value.nbytes + else: + self._size = len(value) + + if self._size > TOO_LARGE_BYTES_BODY: + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + warnings.warn( + "Sending a large body directly with raw bytes might" + " lock the event loop. You should probably pass an " + "io.BytesIO object instead", + ResourceWarning, + **kwargs, + ) + + async def write(self, writer: AbstractStreamWriter) -> None: + await writer.write(self._value) + + +class StringPayload(BytesPayload): + def __init__( + self, + value: str, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + real_encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + real_encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + real_encoding = encoding + + super().__init__( + value.encode(real_encoding), + encoding=real_encoding, + content_type=content_type, + *args, + **kwargs, + ) + + +class StringIOPayload(StringPayload): + def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: + super().__init__(value.read(), *args, **kwargs) + + +class IOBasePayload(Payload): + _value: IO[Any] + + def __init__( + self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any + ) -> None: + if "filename" not in kwargs: + kwargs["filename"] = guess_filename(value) + + super().__init__(value, *args, **kwargs) + + if self._filename is not None and disposition is not None: + if hdrs.CONTENT_DISPOSITION not in self.headers: + self.set_content_disposition(disposition, filename=self._filename) + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + await writer.write(chunk) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class TextIOPayload(IOBasePayload): + _value: TextIO + + def __init__( + self, + value: TextIO, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + + super().__init__( + value, + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + return None + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 
2**16) + while chunk: + data = ( + chunk.encode(encoding=self._encoding) + if self._encoding + else chunk.encode() + ) + await writer.write(data) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class BytesIOPayload(IOBasePayload): + @property + def size(self) -> int: + position = self._value.tell() + end = self._value.seek(0, os.SEEK_END) + self._value.seek(position) + return end - position + + +class BufferedReaderPayload(IOBasePayload): + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + # data.fileno() is not supported, e.g. + # io.BufferedReader(io.BytesIO(b'data')) + return None + + +class JsonPayload(BytesPayload): + def __init__( + self, + value: Any, + encoding: str = "utf-8", + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, + *args: Any, + **kwargs: Any, + ) -> None: + + super().__init__( + dumps(value).encode(encoding), + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + +if TYPE_CHECKING: # pragma: no cover + from typing import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator[bytes] + _AsyncIterable = AsyncIterable[bytes] +else: + from collections.abc import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator + _AsyncIterable = AsyncIterable + + +class AsyncIterablePayload(Payload): + + _iter: Optional[_AsyncIterator] = None + + def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, AsyncIterable): + raise TypeError( + "value argument must support " + "collections.abc.AsyncIterable interface, " + "got {!r}".format(type(value)) + ) + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + self._iter = value.__aiter__() + + async def write(self, writer: AbstractStreamWriter) -> None: + if self._iter: + try: + # iter is not None check prevents rare cases + # when the same iterable is used twice + while True: + chunk = await self._iter.__anext__() + await writer.write(chunk) + except StopAsyncIteration: + self._iter = None + + +class StreamReaderPayload(AsyncIterablePayload): + def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value.iter_any(), *args, **kwargs) + + +PAYLOAD_REGISTRY = PayloadRegistry() +PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) +PAYLOAD_REGISTRY.register(StringPayload, str) +PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) +PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) +PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) +PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) +PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) +PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) +# try_last gives a chance for more specialized async iterables like +# multipart.BodyPartReaderPayload to override the default +PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/venv/lib/python3.8/site-packages/aiohttp/payload_streamer.py b/venv/lib/python3.8/site-packages/aiohttp/payload_streamer.py new file mode 120000 index 0000000..db64b28 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/payload_streamer.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/dd/69/99/efb490e1d73c68a53a8b66406fc2fb14f75eb024a8a91a2749539dfe41 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/py.typed b/venv/lib/python3.8/site-packages/aiohttp/py.typed new file mode 120000 index 0000000..a3f4481 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b2/8c/3d/b284f03fd4ff80401049587b19bf3ce79874e0dc2686cd967be2518193 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/pytest_plugin.py b/venv/lib/python3.8/site-packages/aiohttp/pytest_plugin.py new file mode 120000 index 0000000..3657629 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/pytest_plugin.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/ee/4b/7ddae764804fdb02e9f028c4e787ac84ea2515b069393b9fbe9e2d51c2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/resolver.py b/venv/lib/python3.8/site-packages/aiohttp/resolver.py new file mode 120000 index 0000000..998955b --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/resolver.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/08/04/8e/9d7a79a1987fd6c0855b31650fee71dd5e3d1c05db009c3ee51623826b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/streams.py b/venv/lib/python3.8/site-packages/aiohttp/streams.py new file mode 100644 index 0000000..726b023 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/streams.py @@ -0,0 +1,660 @@ +import asyncio +import collections +import warnings +from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar + +from .base_protocol import BaseProtocol +from .helpers import BaseTimerContext, set_exception, set_result +from .log import internal_logger +from .typedefs import Final + +__all__ = ( + "EMPTY_PAYLOAD", + "EofStream", + "StreamReader", + "DataQueue", + "FlowControlDataQueue", +) + +_T = TypeVar("_T") + + +class EofStream(Exception): + """eof stream indication.""" + + +class AsyncStreamIterator(Generic[_T]): + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: + self.read_func = read_func + + def __aiter__(self) -> "AsyncStreamIterator[_T]": + return self + + async def __anext__(self) -> _T: + try: + rv = await self.read_func() + except EofStream: + raise StopAsyncIteration + if rv == b"": + raise StopAsyncIteration + return rv + + +class ChunkTupleAsyncStreamIterator: + def __init__(self, stream: "StreamReader") -> None: + self._stream = stream + + def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": + return self + + async def __anext__(self) -> Tuple[bytes, bool]: + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv + + +class AsyncStreamReaderMixin: + def __aiter__(self) -> AsyncStreamIterator[bytes]: + return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] + + def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields chunks of size n. + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator( + lambda: self.read(n) # type: ignore[attr-defined,no-any-return] + ) + + def iter_any(self) -> AsyncStreamIterator[bytes]: + """Yield all available data as soon as it is received. 
+ + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] + + def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: + """Yield chunks of data as they are received by the server. + + The yielded objects are tuples + of (bytes, bool) as returned by the StreamReader.readchunk method. + + Python-3.5 available for Python 3.5+ only + """ + return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. + + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... + + """ + + total_bytes = 0 + + def __init__( + self, + protocol: BaseProtocol, + limit: int, + *, + timer: Optional[BaseTimerContext] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + self._protocol = protocol + self._low_water = limit + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._size = 0 + self._cursor = 0 + self._http_chunk_splits: Optional[List[int]] = None + self._buffer: Deque[bytes] = collections.deque() + self._buffer_offset = 0 + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._eof_waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._timer = timer + self._eof_callbacks: List[Callable[[], None]] = [] + + def __repr__(self) -> str: + info = [self.__class__.__name__] + if self._size: + info.append("%d bytes" % self._size) + if self._eof: + info.append("eof") + if self._low_water != 2**16: # default limit + info.append("low=%d high=%d" % (self._low_water, self._high_water)) + if self._waiter: + info.append("w=%r" % self._waiter) + if self._exception: + info.append("e=%r" % self._exception) + return "<%s>" % " ".join(info) + + def get_read_buffer_limits(self) -> Tuple[int, int]: + return (self._low_water, self._high_water) + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._exception = exc + self._eof_callbacks.clear() + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_exception(waiter, exc) + + def on_eof(self, callback: Callable[[], None]) -> None: + if self._eof: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + else: + self._eof_callbacks.append(callback) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_result(waiter, None) + + for cb in self._eof_callbacks: + try: + cb() + except Exception: + internal_logger.exception("Exception in eof callback") + + self._eof_callbacks.clear() + + def is_eof(self) -> bool: + """Return True if 'feed_eof' was called.""" + return self._eof + + def at_eof(self) -> bool: + """Return True if the buffer is empty and 'feed_eof' was called.""" + return self._eof and not self._buffer + + async def wait_eof(self) -> None: + if self._eof: + return + + assert self._eof_waiter is None + self._eof_waiter = self._loop.create_future() + try: + await self._eof_waiter + finally: + 
self._eof_waiter = None + + def unread_data(self, data: bytes) -> None: + """rollback reading some data from stream, inserting it to buffer head.""" + warnings.warn( + "unread_data() is deprecated " + "and will be removed in future releases (#3260)", + DeprecationWarning, + stacklevel=2, + ) + if not data: + return + + if self._buffer_offset: + self._buffer[0] = self._buffer[0][self._buffer_offset :] + self._buffer_offset = 0 + self._size += len(data) + self._cursor -= len(data) + self._buffer.appendleft(data) + self._eof_counter = 0 + + # TODO: size is ignored, remove the param later + def feed_data(self, data: bytes, size: int = 0) -> None: + assert not self._eof, "feed_data after feed_eof" + + if not data: + return + + self._size += len(data) + self._buffer.append(data) + self.total_bytes += len(data) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + if self._size > self._high_water and not self._protocol._reading_paused: + self._protocol.pause_reading() + + def begin_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: + if self.total_bytes: + raise RuntimeError( + "Called begin_http_chunk_receiving when " "some data was already fed" + ) + self._http_chunk_splits = [] + + def end_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: + raise RuntimeError( + "Called end_http_chunk_receiving without calling " + "begin_http_chunk_receiving first" + ) + + # self._http_chunk_splits contains logical byte offsets from start of + # the body transfer. Each offset is the offset of the end of a chunk. + # "Logical" means bytes, accessible for a user. + # If no chunks containing logical data were received, current position + # is definitely zero. + pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0 + + if self.total_bytes == pos: + # We should not add empty chunks here. So we check for that. + # Note, when chunked + gzip is used, we can receive a chunk + # of compressed data, but that data may not be enough for gzip FSM + # to yield any uncompressed data. That's why current position may + # not change after receiving a chunk. + return + + self._http_chunk_splits.append(self.total_bytes) + + # wake up readchunk when end of http chunk received + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + async def _wait(self, func_name: str) -> None: + # StreamReader uses a future to link the protocol feed_data() method + # to a read coroutine. Running two read coroutines at the same time + # would have an unexpected behaviour. It would not be possible to know + # which coroutine would get the next data. 
+ if self._waiter is not None: + raise RuntimeError( + "%s() called while another coroutine is " + "already waiting for incoming data" % func_name + ) + + waiter = self._waiter = self._loop.create_future() + try: + if self._timer: + with self._timer: + await waiter + else: + await waiter + finally: + self._waiter = None + + async def readline(self) -> bytes: + return await self.readuntil() + + async def readuntil(self, separator: bytes = b"\n") -> bytes: + seplen = len(separator) + if seplen == 0: + raise ValueError("Separator should be at least one-byte string") + + if self._exception is not None: + raise self._exception + + chunk = b"" + chunk_size = 0 + not_enough = True + + while not_enough: + while self._buffer and not_enough: + offset = self._buffer_offset + ichar = self._buffer[0].find(separator, offset) + 1 + # Read from current offset to found separator or to the end. + data = self._read_nowait_chunk(ichar - offset if ichar else -1) + chunk += data + chunk_size += len(data) + if ichar: + not_enough = False + + if chunk_size > self._high_water: + raise ValueError("Chunk too big") + + if self._eof: + break + + if not_enough: + await self._wait("readuntil") + + return chunk + + async def read(self, n: int = -1) -> bytes: + if self._exception is not None: + raise self._exception + + # migration problem; with DataQueue you have to catch + # EofStream exception, so common way is to run payload.read() inside + # infinite loop. what can cause real infinite loop with StreamReader + # lets keep this code one major release. + if __debug__: + if self._eof and not self._buffer: + self._eof_counter = getattr(self, "_eof_counter", 0) + 1 + if self._eof_counter > 5: + internal_logger.warning( + "Multiple access to StreamReader in eof state, " + "might be infinite loop.", + stack_info=True, + ) + + if not n: + return b"" + + if n < 0: + # This used to just loop creating a new waiter hoping to + # collect everything in self._buffer, but that would + # deadlock if the subprocess sends more than self.limit + # bytes. So just call self.readany() until EOF. + blocks = [] + while True: + block = await self.readany() + if not block: + break + blocks.append(block) + return b"".join(blocks) + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("read") + + return self._read_nowait(n) + + async def readany(self) -> bytes: + if self._exception is not None: + raise self._exception + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("readany") + + return self._read_nowait(-1) + + async def readchunk(self) -> Tuple[bytes, bool]: + """Returns a tuple of (data, end_of_http_chunk). + + When chunked transfer + encoding is used, end_of_http_chunk is a boolean indicating if the end + of the data corresponds to the end of a HTTP chunk , otherwise it is + always False. 
+ """ + while True: + if self._exception is not None: + raise self._exception + + while self._http_chunk_splits: + pos = self._http_chunk_splits.pop(0) + if pos == self._cursor: + return (b"", True) + if pos > self._cursor: + return (self._read_nowait(pos - self._cursor), True) + internal_logger.warning( + "Skipping HTTP chunk end due to data " + "consumption beyond chunk boundary" + ) + + if self._buffer: + return (self._read_nowait_chunk(-1), False) + # return (self._read_nowait(-1), False) + + if self._eof: + # Special case for signifying EOF. + # (b'', True) is not a final return value actually. + return (b"", False) + + await self._wait("readchunk") + + async def readexactly(self, n: int) -> bytes: + if self._exception is not None: + raise self._exception + + blocks: List[bytes] = [] + while n > 0: + block = await self.read(n) + if not block: + partial = b"".join(blocks) + raise asyncio.IncompleteReadError(partial, len(partial) + n) + blocks.append(block) + n -= len(block) + + return b"".join(blocks) + + def read_nowait(self, n: int = -1) -> bytes: + # default was changed to be consistent with .read(-1) + # + # I believe the most users don't know about the method and + # they are not affected. + if self._exception is not None: + raise self._exception + + if self._waiter and not self._waiter.done(): + raise RuntimeError( + "Called while some coroutine is waiting for incoming data." + ) + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n: int) -> bytes: + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset : offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + self._size -= len(data) + self._cursor += len(data) + + chunk_splits = self._http_chunk_splits + # Prevent memory leak: drop useless chunk splits + while chunk_splits and chunk_splits[0] < self._cursor: + chunk_splits.pop(0) + + if self._size < self._low_water and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + + def _read_nowait(self, n: int) -> bytes: + """Read not more than n bytes, or whole buffer if n == -1""" + chunks = [] + + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b"".join(chunks) if chunks else b"" + + +class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] + def __init__(self) -> None: + pass + + def exception(self) -> Optional[BaseException]: + return None + + def set_exception(self, exc: BaseException) -> None: + pass + + def on_eof(self, callback: Callable[[], None]) -> None: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + + def feed_eof(self) -> None: + pass + + def is_eof(self) -> bool: + return True + + def at_eof(self) -> bool: + return True + + async def wait_eof(self) -> None: + return + + def feed_data(self, data: bytes, n: int = 0) -> None: + pass + + async def readline(self) -> bytes: + return b"" + + async def read(self, n: int = -1) -> bytes: + return b"" + + # TODO add async def readuntil + + async def readany(self) -> bytes: + return b"" + + async def readchunk(self) -> Tuple[bytes, bool]: + return (b"", True) + + async def readexactly(self, n: int) -> bytes: + raise asyncio.IncompleteReadError(b"", n) + + def read_nowait(self, n: int = -1) -> bytes: + return b"" + + 
+EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() + + +class DataQueue(Generic[_T]): + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._size = 0 + self._buffer: Deque[Tuple[_T, int]] = collections.deque() + + def __len__(self) -> int: + return len(self._buffer) + + def is_eof(self) -> bool: + return self._eof + + def at_eof(self) -> bool: + return self._eof and not self._buffer + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._eof = True + self._exception = exc + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + def feed_data(self, data: _T, size: int = 0) -> None: + self._size += size + self._buffer.append((data, size)) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + return data + else: + if self._exception is not None: + raise self._exception + else: + raise EofStream + + def __aiter__(self) -> AsyncStreamIterator[_T]: + return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. 
+ """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + try: + return await super().read() + finally: + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() diff --git a/venv/lib/python3.8/site-packages/aiohttp/tcp_helpers.py b/venv/lib/python3.8/site-packages/aiohttp/tcp_helpers.py new file mode 100644 index 0000000..88b2442 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/tcp_helpers.py @@ -0,0 +1,37 @@ +"""Helper methods to tune a TCP connection""" + +import asyncio +import socket +from contextlib import suppress +from typing import Optional # noqa + +__all__ = ("tcp_keepalive", "tcp_nodelay") + + +if hasattr(socket, "SO_KEEPALIVE"): + + def tcp_keepalive(transport: asyncio.Transport) -> None: + sock = transport.get_extra_info("socket") + if sock is not None: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + +else: + + def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover + pass + + +def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: + sock = transport.get_extra_info("socket") + + if sock is None: + return + + if sock.family not in (socket.AF_INET, socket.AF_INET6): + return + + value = bool(value) + + # socket may be closed already, on windows OSError get raised + with suppress(OSError): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/venv/lib/python3.8/site-packages/aiohttp/test_utils.py b/venv/lib/python3.8/site-packages/aiohttp/test_utils.py new file mode 100644 index 0000000..fcda2f3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/test_utils.py @@ -0,0 +1,706 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import gc +import inspect +import ipaddress +import os +import socket +import sys +import warnings +from abc import ABC, abstractmethod +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterator, + List, + Optional, + Type, + Union, + cast, +) +from unittest import mock + +from aiosignal import Signal +from multidict import CIMultiDict, CIMultiDictProxy +from yarl import URL + +import aiohttp +from aiohttp.client import _RequestContextManager, _WSRequestContextManager + +from . 
import ClientSession, hdrs +from .abc import AbstractCookieJar +from .client_reqrep import ClientResponse +from .client_ws import ClientWebSocketResponse +from .helpers import PY_38, sentinel +from .http import HttpVersion, RawRequestMessage +from .web import ( + Application, + AppRunner, + BaseRunner, + Request, + Server, + ServerRunner, + SockSite, + UrlMappingMatchInfo, +) +from .web_protocol import _RequestHandler + +if TYPE_CHECKING: # pragma: no cover + from ssl import SSLContext +else: + SSLContext = None + +if PY_38: + from unittest import IsolatedAsyncioTestCase as TestCase +else: + from asynctest import TestCase # type: ignore[no-redef] + +REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" + + +def get_unused_port_socket( + host: str, family: socket.AddressFamily = socket.AF_INET +) -> socket.socket: + return get_port_socket(host, 0, family) + + +def get_port_socket( + host: str, port: int, family: socket.AddressFamily +) -> socket.socket: + s = socket.socket(family, socket.SOCK_STREAM) + if REUSE_ADDRESS: + # Windows has different semantics for SO_REUSEADDR, + # so don't set it. Ref: + # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((host, port)) + return s + + +def unused_port() -> int: + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return cast(int, s.getsockname()[1]) + + +class BaseTestServer(ABC): + __test__ = False + + def __init__( + self, + *, + scheme: Union[str, object] = sentinel, + loop: Optional[asyncio.AbstractEventLoop] = None, + host: str = "127.0.0.1", + port: Optional[int] = None, + skip_url_asserts: bool = False, + socket_factory: Callable[ + [str, int, socket.AddressFamily], socket.socket + ] = get_port_socket, + **kwargs: Any, + ) -> None: + self._loop = loop + self.runner: Optional[BaseRunner] = None + self._root: Optional[URL] = None + self.host = host + self.port = port + self._closed = False + self.scheme = scheme + self.skip_url_asserts = skip_url_asserts + self.socket_factory = socket_factory + + async def start_server( + self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any + ) -> None: + if self.runner: + return + self._loop = loop + self._ssl = kwargs.pop("ssl", None) + self.runner = await self._make_runner(**kwargs) + await self.runner.setup() + if not self.port: + self.port = 0 + try: + version = ipaddress.ip_address(self.host).version + except ValueError: + version = 4 + family = socket.AF_INET6 if version == 6 else socket.AF_INET + _sock = self.socket_factory(self.host, self.port, family) + self.host, self.port = _sock.getsockname()[:2] + site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) + await site.start() + server = site._server + assert server is not None + sockets = server.sockets + assert sockets is not None + self.port = sockets[0].getsockname()[1] + if self.scheme is sentinel: + if self._ssl: + scheme = "https" + else: + scheme = "http" + self.scheme = scheme + self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + + @abstractmethod # pragma: no cover + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + pass + + def make_url(self, path: str) -> URL: + assert self._root is not None + url = URL(path) + if not self.skip_url_asserts: + assert not url.is_absolute() + return self._root.join(url) + else: + return URL(str(self._root) + path) + + @property + def 
started(self) -> bool: + return self.runner is not None + + @property + def closed(self) -> bool: + return self._closed + + @property + def handler(self) -> Server: + # for backward compatibility + # web.Server instance + runner = self.runner + assert runner is not None + assert runner.server is not None + return runner.server + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. + + """ + if self.started and not self.closed: + assert self.runner is not None + await self.runner.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "BaseTestServer": + await self.start_server(loop=self._loop) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + await self.close() + + +class TestServer(BaseTestServer): + def __init__( + self, + app: Application, + *, + scheme: Union[str, object] = sentinel, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ): + self.app = app + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + return AppRunner(self.app, **kwargs) + + +class RawTestServer(BaseTestServer): + def __init__( + self, + handler: _RequestHandler, + *, + scheme: Union[str, object] = sentinel, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ) -> None: + self._handler = handler + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: + srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) + return ServerRunner(srv, debug=debug, **kwargs) + + +class TestClient: + """ + A test client implementation. + + To write functional tests for aiohttp based servers. 
+ + """ + + __test__ = False + + def __init__( + self, + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> None: + if not isinstance(server, BaseTestServer): + raise TypeError( + "server must be TestServer " "instance, found type: %r" % type(server) + ) + self._server = server + self._loop = loop + if cookie_jar is None: + cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) + self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) + self._closed = False + self._responses: List[ClientResponse] = [] + self._websockets: List[ClientWebSocketResponse] = [] + + async def start_server(self) -> None: + await self._server.start_server(loop=self._loop) + + @property + def host(self) -> str: + return self._server.host + + @property + def port(self) -> Optional[int]: + return self._server.port + + @property + def server(self) -> BaseTestServer: + return self._server + + @property + def app(self) -> Optional[Application]: + return cast(Optional[Application], getattr(self._server, "app", None)) + + @property + def session(self) -> ClientSession: + """An internal aiohttp.ClientSession. + + Unlike the methods on the TestClient, client session requests + do not automatically include the host in the url queried, and + will require an absolute path to the resource. + + """ + return self._session + + def make_url(self, path: str) -> URL: + return self._server.make_url(path) + + async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse: + resp = await self._session.request(method, self.make_url(path), **kwargs) + # save it to close later + self._responses.append(resp) + return resp + + def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager: + """Routes a request to tested http server. + + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. + + """ + return _RequestContextManager(self._request(method, path, **kwargs)) + + def get(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + + def post(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + + def options(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) + + def head(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + + def put(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + + def patch(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) + + def delete(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) + + def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager: + """Initiate websocket connection. 
+ + The api corresponds to aiohttp.ClientSession.ws_connect. + + """ + return _WSRequestContextManager(self._ws_connect(path, **kwargs)) + + async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse: + ws = await self._session.ws_connect(self.make_url(path), **kwargs) + self._websockets.append(ws) + return ws + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + for ws in self._websockets: + await ws.close() + await self._session.close() + await self._server.close() + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "TestClient": + await self.start_server() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class AioHTTPTestCase(TestCase): + """A base class to allow for unittest web applications using aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. + * self.app (aiohttp.web.Application): the application returned by + self.get_application() + + Note that the TestClient's methods are asynchronous: you have to + execute function on the test client using asynchronous methods. + """ + + async def get_application(self) -> Application: + """Get application. + + This method should be overridden + to return the aiohttp.web.Application + object to test. + """ + return self.get_app() + + def get_app(self) -> Application: + """Obsolete method used to constructing web application. + + Use .get_application() coroutine instead. 
+ """ + raise RuntimeError("Did you forget to define get_application()?") + + def setUp(self) -> None: + if not PY_38: + asyncio.get_event_loop().run_until_complete(self.asyncSetUp()) + + async def asyncSetUp(self) -> None: + try: + self.loop = asyncio.get_running_loop() + except (AttributeError, RuntimeError): # AttributeError->py36 + self.loop = asyncio.get_event_loop_policy().get_event_loop() + + return await self.setUpAsync() + + async def setUpAsync(self) -> None: + self.app = await self.get_application() + self.server = await self.get_server(self.app) + self.client = await self.get_client(self.server) + + await self.client.start_server() + + def tearDown(self) -> None: + if not PY_38: + self.loop.run_until_complete(self.asyncTearDown()) + + async def asyncTearDown(self) -> None: + return await self.tearDownAsync() + + async def tearDownAsync(self) -> None: + await self.client.close() + + async def get_server(self, app: Application) -> TestServer: + """Return a TestServer instance.""" + return TestServer(app, loop=self.loop) + + async def get_client(self, server: TestServer) -> TestClient: + """Return a TestClient instance.""" + return TestClient(server, loop=self.loop) + + +def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: + """ + A decorator dedicated to use with asynchronous AioHTTPTestCase test methods. + + In 3.8+, this does nothing. + """ + warnings.warn( + "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+", + DeprecationWarning, + stacklevel=2, + ) + return func + + +_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] + + +@contextlib.contextmanager +def loop_context( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False +) -> Iterator[asyncio.AbstractEventLoop]: + """A contextmanager that creates an event_loop, for test purposes. + + Handles the creation and cleanup of a test loop. + """ + loop = setup_test_loop(loop_factory) + yield loop + teardown_test_loop(loop, fast=fast) + + +def setup_test_loop( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, +) -> asyncio.AbstractEventLoop: + """Create and return an asyncio.BaseEventLoop instance. + + The caller should also call teardown_test_loop, + once they are done with the loop. 
+ """ + loop = loop_factory() + try: + module = loop.__class__.__module__ + skip_watcher = "uvloop" in module + except AttributeError: # pragma: no cover + # Just in case + skip_watcher = True + asyncio.set_event_loop(loop) + if sys.platform != "win32" and not skip_watcher: + policy = asyncio.get_event_loop_policy() + watcher: asyncio.AbstractChildWatcher + try: # Python >= 3.8 + # Refs: + # * https://github.com/pytest-dev/pytest-xdist/issues/620 + # * https://stackoverflow.com/a/58614689/595220 + # * https://bugs.python.org/issue35621 + # * https://github.com/python/cpython/pull/14344 + watcher = asyncio.ThreadedChildWatcher() + except AttributeError: # Python < 3.8 + watcher = asyncio.SafeChildWatcher() + watcher.attach_loop(loop) + with contextlib.suppress(NotImplementedError): + policy.set_child_watcher(watcher) + return loop + + +def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: + """Teardown and cleanup an event_loop created by setup_test_loop.""" + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + + if not fast: + gc.collect() + + asyncio.set_event_loop(None) + + +def _create_app_mock() -> mock.MagicMock: + def get_dict(app: Any, key: str) -> Any: + return app.__app_dict[key] + + def set_dict(app: Any, key: str, value: Any) -> None: + app.__app_dict[key] = value + + app = mock.MagicMock(spec=Application) + app.__app_dict = {} + app.__getitem__ = get_dict + app.__setitem__ = set_dict + + app._debug = False + app.on_response_prepare = Signal(app) + app.on_response_prepare.freeze() + return app + + +def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: + transport = mock.Mock() + + def get_extra_info(key: str) -> Optional[SSLContext]: + if key == "sslcontext": + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request( + method: str, + path: str, + headers: Any = None, + *, + match_info: Any = sentinel, + version: HttpVersion = HttpVersion(1, 1), + closing: bool = False, + app: Any = None, + writer: Any = sentinel, + protocol: Any = sentinel, + transport: Any = sentinel, + payload: Any = sentinel, + sslcontext: Optional[SSLContext] = None, + client_max_size: int = 1024**2, + loop: Any = ..., +) -> Request: + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. 
+ """ + task = mock.Mock() + if loop is ...: + loop = mock.Mock() + loop.create_future.return_value = () + + if version < HttpVersion(1, 1): + closing = True + + if headers: + headers = CIMultiDictProxy(CIMultiDict(headers)) + raw_hdrs = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + else: + headers = CIMultiDictProxy(CIMultiDict()) + raw_hdrs = () + + chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + message = RawRequestMessage( + method, + path, + version, + headers, + raw_hdrs, + closing, + None, + False, + chunked, + URL(path), + ) + if app is None: + app = _create_app_mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if protocol is sentinel: + protocol = mock.Mock() + protocol.transport = transport + + if writer is sentinel: + writer = mock.Mock() + writer.write_headers = make_mocked_coro(None) + writer.write = make_mocked_coro(None) + writer.write_eof = make_mocked_coro(None) + writer.drain = make_mocked_coro(None) + writer.transport = transport + + protocol.transport = transport + protocol.writer = writer + + if payload is sentinel: + payload = mock.Mock() + + req = Request( + message, payload, protocol, writer, task, loop, client_max_size=client_max_size + ) + + match_info = UrlMappingMatchInfo( + {} if match_info is sentinel else match_info, mock.Mock() + ) + match_info.add_app(app) + req._match_info = match_info + + return req + + +def make_mocked_coro( + return_value: Any = sentinel, raise_exception: Any = sentinel +) -> Any: + """Creates a coroutine mock.""" + + async def mock_coro(*args: Any, **kwargs: Any) -> Any: + if raise_exception is not sentinel: + raise raise_exception + if not inspect.isawaitable(return_value): + return return_value + await return_value + + return mock.Mock(wraps=mock_coro) diff --git a/venv/lib/python3.8/site-packages/aiohttp/tracing.py b/venv/lib/python3.8/site-packages/aiohttp/tracing.py new file mode 100644 index 0000000..d5596a4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/tracing.py @@ -0,0 +1,472 @@ +from types import SimpleNamespace +from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar + +import attr +from aiosignal import Signal +from multidict import CIMultiDict +from yarl import URL + +from .client_reqrep import ClientResponse + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession + from .typedefs import Protocol + + _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) + + class _SignalCallback(Protocol[_ParamT_contra]): + def __call__( + self, + __client_session: ClientSession, + __trace_config_ctx: SimpleNamespace, + __params: _ParamT_contra, + ) -> Awaitable[None]: + ... 
+ + +__all__ = ( + "TraceConfig", + "TraceRequestStartParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionCreateEndParams", + "TraceConnectionReuseconnParams", + "TraceDnsResolveHostStartParams", + "TraceDnsResolveHostEndParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceRequestRedirectParams", + "TraceRequestChunkSentParams", + "TraceResponseChunkReceivedParams", + "TraceRequestHeadersSentParams", +) + + +class TraceConfig: + """First-class used to trace requests launched via ClientSession objects.""" + + def __init__( + self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace + ) -> None: + self._on_request_start: Signal[ + _SignalCallback[TraceRequestStartParams] + ] = Signal(self) + self._on_request_chunk_sent: Signal[ + _SignalCallback[TraceRequestChunkSentParams] + ] = Signal(self) + self._on_response_chunk_received: Signal[ + _SignalCallback[TraceResponseChunkReceivedParams] + ] = Signal(self) + self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal( + self + ) + self._on_request_exception: Signal[ + _SignalCallback[TraceRequestExceptionParams] + ] = Signal(self) + self._on_request_redirect: Signal[ + _SignalCallback[TraceRequestRedirectParams] + ] = Signal(self) + self._on_connection_queued_start: Signal[ + _SignalCallback[TraceConnectionQueuedStartParams] + ] = Signal(self) + self._on_connection_queued_end: Signal[ + _SignalCallback[TraceConnectionQueuedEndParams] + ] = Signal(self) + self._on_connection_create_start: Signal[ + _SignalCallback[TraceConnectionCreateStartParams] + ] = Signal(self) + self._on_connection_create_end: Signal[ + _SignalCallback[TraceConnectionCreateEndParams] + ] = Signal(self) + self._on_connection_reuseconn: Signal[ + _SignalCallback[TraceConnectionReuseconnParams] + ] = Signal(self) + self._on_dns_resolvehost_start: Signal[ + _SignalCallback[TraceDnsResolveHostStartParams] + ] = Signal(self) + self._on_dns_resolvehost_end: Signal[ + _SignalCallback[TraceDnsResolveHostEndParams] + ] = Signal(self) + self._on_dns_cache_hit: Signal[ + _SignalCallback[TraceDnsCacheHitParams] + ] = Signal(self) + self._on_dns_cache_miss: Signal[ + _SignalCallback[TraceDnsCacheMissParams] + ] = Signal(self) + self._on_request_headers_sent: Signal[ + _SignalCallback[TraceRequestHeadersSentParams] + ] = Signal(self) + + self._trace_config_ctx_factory = trace_config_ctx_factory + + def trace_config_ctx( + self, trace_request_ctx: Optional[SimpleNamespace] = None + ) -> SimpleNamespace: + """Return a new trace_config_ctx instance""" + return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) + + def freeze(self) -> None: + self._on_request_start.freeze() + self._on_request_chunk_sent.freeze() + self._on_response_chunk_received.freeze() + self._on_request_end.freeze() + self._on_request_exception.freeze() + self._on_request_redirect.freeze() + self._on_connection_queued_start.freeze() + self._on_connection_queued_end.freeze() + self._on_connection_create_start.freeze() + self._on_connection_create_end.freeze() + self._on_connection_reuseconn.freeze() + self._on_dns_resolvehost_start.freeze() + self._on_dns_resolvehost_end.freeze() + self._on_dns_cache_hit.freeze() + self._on_dns_cache_miss.freeze() + self._on_request_headers_sent.freeze() + + @property + def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": + return 
self._on_request_start + + @property + def on_request_chunk_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]": + return self._on_request_chunk_sent + + @property + def on_response_chunk_received( + self, + ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": + return self._on_response_chunk_received + + @property + def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": + return self._on_request_end + + @property + def on_request_exception( + self, + ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": + return self._on_request_exception + + @property + def on_request_redirect( + self, + ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": + return self._on_request_redirect + + @property + def on_connection_queued_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": + return self._on_connection_queued_start + + @property + def on_connection_queued_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": + return self._on_connection_queued_end + + @property + def on_connection_create_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": + return self._on_connection_create_start + + @property + def on_connection_create_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": + return self._on_connection_create_end + + @property + def on_connection_reuseconn( + self, + ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": + return self._on_connection_reuseconn + + @property + def on_dns_resolvehost_start( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": + return self._on_dns_resolvehost_start + + @property + def on_dns_resolvehost_end( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": + return self._on_dns_resolvehost_end + + @property + def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": + return self._on_dns_cache_hit + + @property + def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": + return self._on_dns_cache_miss + + @property + def on_request_headers_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]": + return self._on_request_headers_sent + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestStartParams: + """Parameters sent by the `on_request_start` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestChunkSentParams: + """Parameters sent by the `on_request_chunk_sent` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceResponseChunkReceivedParams: + """Parameters sent by the `on_response_chunk_received` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestEndParams: + """Parameters sent by the `on_request_end` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestExceptionParams: + """Parameters sent by the `on_request_exception` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + exception: BaseException + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestRedirectParams: + """Parameters sent by the `on_request_redirect` signal""" + + method: str + url: URL + 
headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedStartParams: + """Parameters sent by the `on_connection_queued_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedEndParams: + """Parameters sent by the `on_connection_queued_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateStartParams: + """Parameters sent by the `on_connection_create_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateEndParams: + """Parameters sent by the `on_connection_create_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionReuseconnParams: + """Parameters sent by the `on_connection_reuseconn` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostStartParams: + """Parameters sent by the `on_dns_resolvehost_start` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostEndParams: + """Parameters sent by the `on_dns_resolvehost_end` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheHitParams: + """Parameters sent by the `on_dns_cache_hit` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheMissParams: + """Parameters sent by the `on_dns_cache_miss` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestHeadersSentParams: + """Parameters sent by the `on_request_headers_sent` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +class Trace: + """Internal dependency holder class. + + Used to keep together the main dependencies used + at the moment of send a signal. 
+ """ + + def __init__( + self, + session: "ClientSession", + trace_config: TraceConfig, + trace_config_ctx: SimpleNamespace, + ) -> None: + self._trace_config = trace_config + self._trace_config_ctx = trace_config_ctx + self._session = session + + async def send_request_start( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config.on_request_start.send( + self._session, + self._trace_config_ctx, + TraceRequestStartParams(method, url, headers), + ) + + async def send_request_chunk_sent( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_request_chunk_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestChunkSentParams(method, url, chunk), + ) + + async def send_response_chunk_received( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_response_chunk_received.send( + self._session, + self._trace_config_ctx, + TraceResponseChunkReceivedParams(method, url, chunk), + ) + + async def send_request_end( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config.on_request_end.send( + self._session, + self._trace_config_ctx, + TraceRequestEndParams(method, url, headers, response), + ) + + async def send_request_exception( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + exception: BaseException, + ) -> None: + return await self._trace_config.on_request_exception.send( + self._session, + self._trace_config_ctx, + TraceRequestExceptionParams(method, url, headers, exception), + ) + + async def send_request_redirect( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config._on_request_redirect.send( + self._session, + self._trace_config_ctx, + TraceRequestRedirectParams(method, url, headers, response), + ) + + async def send_connection_queued_start(self) -> None: + return await self._trace_config.on_connection_queued_start.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() + ) + + async def send_connection_queued_end(self) -> None: + return await self._trace_config.on_connection_queued_end.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() + ) + + async def send_connection_create_start(self) -> None: + return await self._trace_config.on_connection_create_start.send( + self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() + ) + + async def send_connection_create_end(self) -> None: + return await self._trace_config.on_connection_create_end.send( + self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() + ) + + async def send_connection_reuseconn(self) -> None: + return await self._trace_config.on_connection_reuseconn.send( + self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() + ) + + async def send_dns_resolvehost_start(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_start.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) + ) + + async def send_dns_resolvehost_end(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_end.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) + ) + + async def send_dns_cache_hit(self, host: str) -> None: + return await self._trace_config.on_dns_cache_hit.send( + self._session, self._trace_config_ctx, 
TraceDnsCacheHitParams(host) + ) + + async def send_dns_cache_miss(self, host: str) -> None: + return await self._trace_config.on_dns_cache_miss.send( + self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) + ) + + async def send_request_headers( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config._on_request_headers_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestHeadersSentParams(method, url, headers), + ) diff --git a/venv/lib/python3.8/site-packages/aiohttp/typedefs.py b/venv/lib/python3.8/site-packages/aiohttp/typedefs.py new file mode 120000 index 0000000..799f8ec --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/typedefs.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a0/b9/c6/ddfc5c04476ee24233522d0da5c839923ab4d5c4642b6ed54a08cd9e6c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/web.py b/venv/lib/python3.8/site-packages/aiohttp/web.py new file mode 100644 index 0000000..cefae2b --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web.py @@ -0,0 +1,588 @@ +import asyncio +import logging +import socket +import sys +from argparse import ArgumentParser +from collections.abc import Iterable +from importlib import import_module +from typing import ( + Any, + Awaitable, + Callable, + Iterable as TypingIterable, + List, + Optional, + Set, + Type, + Union, + cast, +) + +from .abc import AbstractAccessLogger +from .helpers import all_tasks +from .log import access_logger +from .web_app import Application as Application, CleanupError as CleanupError +from .web_exceptions import ( + HTTPAccepted as HTTPAccepted, + HTTPBadGateway as HTTPBadGateway, + HTTPBadRequest as HTTPBadRequest, + HTTPClientError as HTTPClientError, + HTTPConflict as HTTPConflict, + HTTPCreated as HTTPCreated, + HTTPError as HTTPError, + HTTPException as HTTPException, + HTTPExpectationFailed as HTTPExpectationFailed, + HTTPFailedDependency as HTTPFailedDependency, + HTTPForbidden as HTTPForbidden, + HTTPFound as HTTPFound, + HTTPGatewayTimeout as HTTPGatewayTimeout, + HTTPGone as HTTPGone, + HTTPInsufficientStorage as HTTPInsufficientStorage, + HTTPInternalServerError as HTTPInternalServerError, + HTTPLengthRequired as HTTPLengthRequired, + HTTPMethodNotAllowed as HTTPMethodNotAllowed, + HTTPMisdirectedRequest as HTTPMisdirectedRequest, + HTTPMovedPermanently as HTTPMovedPermanently, + HTTPMultipleChoices as HTTPMultipleChoices, + HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, + HTTPNoContent as HTTPNoContent, + HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, + HTTPNotAcceptable as HTTPNotAcceptable, + HTTPNotExtended as HTTPNotExtended, + HTTPNotFound as HTTPNotFound, + HTTPNotImplemented as HTTPNotImplemented, + HTTPNotModified as HTTPNotModified, + HTTPOk as HTTPOk, + HTTPPartialContent as HTTPPartialContent, + HTTPPaymentRequired as HTTPPaymentRequired, + HTTPPermanentRedirect as HTTPPermanentRedirect, + HTTPPreconditionFailed as HTTPPreconditionFailed, + HTTPPreconditionRequired as HTTPPreconditionRequired, + HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, + HTTPRedirection as HTTPRedirection, + HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, + HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, + HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, + HTTPRequestTimeout as HTTPRequestTimeout, + HTTPRequestURITooLong as HTTPRequestURITooLong, + HTTPResetContent as 
HTTPResetContent, + HTTPSeeOther as HTTPSeeOther, + HTTPServerError as HTTPServerError, + HTTPServiceUnavailable as HTTPServiceUnavailable, + HTTPSuccessful as HTTPSuccessful, + HTTPTemporaryRedirect as HTTPTemporaryRedirect, + HTTPTooManyRequests as HTTPTooManyRequests, + HTTPUnauthorized as HTTPUnauthorized, + HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, + HTTPUnprocessableEntity as HTTPUnprocessableEntity, + HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, + HTTPUpgradeRequired as HTTPUpgradeRequired, + HTTPUseProxy as HTTPUseProxy, + HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, + HTTPVersionNotSupported as HTTPVersionNotSupported, +) +from .web_fileresponse import FileResponse as FileResponse +from .web_log import AccessLogger +from .web_middlewares import ( + middleware as middleware, + normalize_path_middleware as normalize_path_middleware, +) +from .web_protocol import ( + PayloadAccessError as PayloadAccessError, + RequestHandler as RequestHandler, + RequestPayloadError as RequestPayloadError, +) +from .web_request import ( + BaseRequest as BaseRequest, + FileField as FileField, + Request as Request, +) +from .web_response import ( + ContentCoding as ContentCoding, + Response as Response, + StreamResponse as StreamResponse, + json_response as json_response, +) +from .web_routedef import ( + AbstractRouteDef as AbstractRouteDef, + RouteDef as RouteDef, + RouteTableDef as RouteTableDef, + StaticDef as StaticDef, + delete as delete, + get as get, + head as head, + options as options, + patch as patch, + post as post, + put as put, + route as route, + static as static, + view as view, +) +from .web_runner import ( + AppRunner as AppRunner, + BaseRunner as BaseRunner, + BaseSite as BaseSite, + GracefulExit as GracefulExit, + NamedPipeSite as NamedPipeSite, + ServerRunner as ServerRunner, + SockSite as SockSite, + TCPSite as TCPSite, + UnixSite as UnixSite, +) +from .web_server import Server as Server +from .web_urldispatcher import ( + AbstractResource as AbstractResource, + AbstractRoute as AbstractRoute, + DynamicResource as DynamicResource, + PlainResource as PlainResource, + PrefixedSubAppResource as PrefixedSubAppResource, + Resource as Resource, + ResourceRoute as ResourceRoute, + StaticResource as StaticResource, + UrlDispatcher as UrlDispatcher, + UrlMappingMatchInfo as UrlMappingMatchInfo, + View as View, +) +from .web_ws import ( + WebSocketReady as WebSocketReady, + WebSocketResponse as WebSocketResponse, + WSMsgType as WSMsgType, +) + +__all__ = ( + # web_app + "Application", + "CleanupError", + # web_exceptions + "HTTPAccepted", + "HTTPBadGateway", + "HTTPBadRequest", + "HTTPClientError", + "HTTPConflict", + "HTTPCreated", + "HTTPError", + "HTTPException", + "HTTPExpectationFailed", + "HTTPFailedDependency", + "HTTPForbidden", + "HTTPFound", + "HTTPGatewayTimeout", + "HTTPGone", + "HTTPInsufficientStorage", + "HTTPInternalServerError", + "HTTPLengthRequired", + "HTTPMethodNotAllowed", + "HTTPMisdirectedRequest", + "HTTPMovedPermanently", + "HTTPMultipleChoices", + "HTTPNetworkAuthenticationRequired", + "HTTPNoContent", + "HTTPNonAuthoritativeInformation", + "HTTPNotAcceptable", + "HTTPNotExtended", + "HTTPNotFound", + "HTTPNotImplemented", + "HTTPNotModified", + "HTTPOk", + "HTTPPartialContent", + "HTTPPaymentRequired", + "HTTPPermanentRedirect", + "HTTPPreconditionFailed", + "HTTPPreconditionRequired", + "HTTPProxyAuthenticationRequired", + "HTTPRedirection", + "HTTPRequestEntityTooLarge", + "HTTPRequestHeaderFieldsTooLarge", + 
"HTTPRequestRangeNotSatisfiable", + "HTTPRequestTimeout", + "HTTPRequestURITooLong", + "HTTPResetContent", + "HTTPSeeOther", + "HTTPServerError", + "HTTPServiceUnavailable", + "HTTPSuccessful", + "HTTPTemporaryRedirect", + "HTTPTooManyRequests", + "HTTPUnauthorized", + "HTTPUnavailableForLegalReasons", + "HTTPUnprocessableEntity", + "HTTPUnsupportedMediaType", + "HTTPUpgradeRequired", + "HTTPUseProxy", + "HTTPVariantAlsoNegotiates", + "HTTPVersionNotSupported", + # web_fileresponse + "FileResponse", + # web_middlewares + "middleware", + "normalize_path_middleware", + # web_protocol + "PayloadAccessError", + "RequestHandler", + "RequestPayloadError", + # web_request + "BaseRequest", + "FileField", + "Request", + # web_response + "ContentCoding", + "Response", + "StreamResponse", + "json_response", + # web_routedef + "AbstractRouteDef", + "RouteDef", + "RouteTableDef", + "StaticDef", + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "route", + "static", + "view", + # web_runner + "AppRunner", + "BaseRunner", + "BaseSite", + "GracefulExit", + "ServerRunner", + "SockSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + # web_server + "Server", + # web_urldispatcher + "AbstractResource", + "AbstractRoute", + "DynamicResource", + "PlainResource", + "PrefixedSubAppResource", + "Resource", + "ResourceRoute", + "StaticResource", + "UrlDispatcher", + "UrlMappingMatchInfo", + "View", + # web_ws + "WebSocketReady", + "WebSocketResponse", + "WSMsgType", + # web + "run_app", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = Any # type: ignore[misc,assignment] + +HostSequence = TypingIterable[str] + + +async def _run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Optional[str] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Callable[..., None] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, +) -> None: + # A internal functio to actually do all dirty job for application running + if asyncio.iscoroutine(app): + app = await app # type: ignore[misc] + + app = cast(Application, app) + + runner = AppRunner( + app, + handle_signals=handle_signals, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + keepalive_timeout=keepalive_timeout, + ) + + await runner.setup() + + sites: List[BaseSite] = [] + + try: + if host is not None: + if isinstance(host, (str, bytes, bytearray, memoryview)): + sites.append( + TCPSite( + runner, + host, + port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + else: + for h in host: + sites.append( + TCPSite( + runner, + h, + port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + elif path is None and sock is None or port is not None: + sites.append( + TCPSite( + runner, + port=port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + 
backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + if path is not None: + if isinstance(path, (str, bytes, bytearray, memoryview)): + sites.append( + UnixSite( + runner, + path, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for p in path: + sites.append( + UnixSite( + runner, + p, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + + if sock is not None: + if not isinstance(sock, Iterable): + sites.append( + SockSite( + runner, + sock, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for s in sock: + sites.append( + SockSite( + runner, + s, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + for site in sites: + await site.start() + + if print: # pragma: no branch + names = sorted(str(s.name) for s in runner.sites) + print( + "======== Running on {} ========\n" + "(Press CTRL+C to quit)".format(", ".join(names)) + ) + + # sleep forever by 1 hour intervals, + # on Windows before Python 3.8 wake up every 1 second to handle + # Ctrl+C smoothly + if sys.platform == "win32" and sys.version_info < (3, 8): + delay = 1 + else: + delay = 3600 + + while True: + await asyncio.sleep(delay) + finally: + await runner.cleanup() + + +def _cancel_tasks( + to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop +) -> None: + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + +def run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Optional[str] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Callable[..., None] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> None: + """Run an app locally""" + if loop is None: + loop = asyncio.new_event_loop() + + # Configure if and only if in debugging mode and using the default logger + if loop.get_debug() and access_log and access_log.name == "aiohttp.access": + if access_log.level == logging.NOTSET: + access_log.setLevel(logging.DEBUG) + if not access_log.hasHandlers(): + access_log.addHandler(logging.StreamHandler()) + + main_task = loop.create_task( + _run_app( + app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + keepalive_timeout=keepalive_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + try: + asyncio.set_event_loop(loop) + 
loop.run_until_complete(main_task) + except (GracefulExit, KeyboardInterrupt): # pragma: no cover + pass + finally: + _cancel_tasks({main_task}, loop) + _cancel_tasks(all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() + + +def main(argv: List[str]) -> None: + arg_parser = ArgumentParser( + description="aiohttp.web Application server", prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=( + "Callable returning the `aiohttp.web.Application` instance to " + "run. Should be specified in the 'module:function' syntax." + ), + metavar="entry-func", + ) + arg_parser.add_argument( + "-H", + "--hostname", + help="TCP/IP hostname to serve on (default: %(default)r)", + default="localhost", + ) + arg_parser.add_argument( + "-P", + "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default="8080", + ) + arg_parser.add_argument( + "-U", + "--path", + help="Unix file system path to serve on. Specifying a path will cause " + "hostname and port arguments to be ignored.", + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error("'entry-func' not in 'module:function' syntax") + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError as ex: + arg_parser.error(f"unable to import {mod_str}: {ex}") + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") + + # Compatibility logic + if args.path is not None and not hasattr(socket, "AF_UNIX"): + arg_parser.error( + "file system paths not supported by your operating" " environment" + ) + + logging.basicConfig(level=logging.DEBUG) + + app = func(extra_argv) + run_app(app, host=args.hostname, port=args.port, path=args.path) + arg_parser.exit(message="Stopped\n") + + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_app.py b/venv/lib/python3.8/site-packages/aiohttp/web_app.py new file mode 100644 index 0000000..8fd4471 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_app.py @@ -0,0 +1,557 @@ +import asyncio +import logging +import warnings +from functools import partial, update_wrapper +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Awaitable, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from aiosignal import Signal +from frozenlist import FrozenList + +from . 
import hdrs +from .abc import ( + AbstractAccessLogger, + AbstractMatchInfo, + AbstractRouter, + AbstractStreamWriter, +) +from .helpers import DEBUG +from .http_parser import RawRequestMessage +from .log import web_logger +from .streams import StreamReader +from .web_log import AccessLogger +from .web_middlewares import _fix_request_current_app +from .web_protocol import RequestHandler +from .web_request import Request +from .web_response import StreamResponse +from .web_routedef import AbstractRouteDef +from .web_server import Server +from .web_urldispatcher import ( + AbstractResource, + AbstractRoute, + Domain, + MaskDomain, + MatchedSubAppResource, + PrefixedSubAppResource, + UrlDispatcher, +) + +__all__ = ("Application", "CleanupError") + + +if TYPE_CHECKING: # pragma: no cover + from .typedefs import Handler + + _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] + _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] + _Middleware = Union[ + Callable[[Request, Handler], Awaitable[StreamResponse]], + Callable[["Application", Handler], Awaitable[Handler]], # old-style + ] + _Middlewares = FrozenList[_Middleware] + _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]] + _Subapps = List["Application"] +else: + # No type checker mode, skip types + _AppSignal = Signal + _RespPrepareSignal = Signal + _Middleware = Callable + _Middlewares = FrozenList + _MiddlewaresHandlers = Optional[Sequence] + _Subapps = List + + +class Application(MutableMapping[str, Any]): + ATTRS = frozenset( + [ + "logger", + "_debug", + "_router", + "_loop", + "_handler_args", + "_middlewares", + "_middlewares_handlers", + "_run_middlewares", + "_state", + "_frozen", + "_pre_frozen", + "_subapps", + "_on_response_prepare", + "_on_startup", + "_on_shutdown", + "_on_cleanup", + "_client_max_size", + "_cleanup_ctx", + ] + ) + + def __init__( + self, + *, + logger: logging.Logger = web_logger, + router: Optional[UrlDispatcher] = None, + middlewares: Iterable[_Middleware] = (), + handler_args: Optional[Mapping[str, Any]] = None, + client_max_size: int = 1024**2, + loop: Optional[asyncio.AbstractEventLoop] = None, + debug: Any = ..., # mypy doesn't support ellipsis + ) -> None: + if router is None: + router = UrlDispatcher() + else: + warnings.warn( + "router argument is deprecated", DeprecationWarning, stacklevel=2 + ) + assert isinstance(router, AbstractRouter), router + + if loop is not None: + warnings.warn( + "loop argument is deprecated", DeprecationWarning, stacklevel=2 + ) + + if debug is not ...: + warnings.warn( + "debug argument is deprecated", DeprecationWarning, stacklevel=2 + ) + self._debug = debug + self._router: UrlDispatcher = router + self._loop = loop + self._handler_args = handler_args + self.logger = logger + + self._middlewares: _Middlewares = FrozenList(middlewares) + + # initialized on freezing + self._middlewares_handlers: _MiddlewaresHandlers = None + # initialized on freezing + self._run_middlewares: Optional[bool] = None + + self._state: Dict[str, Any] = {} + self._frozen = False + self._pre_frozen = False + self._subapps: _Subapps = [] + + self._on_response_prepare: _RespPrepareSignal = Signal(self) + self._on_startup: _AppSignal = Signal(self) + self._on_shutdown: _AppSignal = Signal(self) + self._on_cleanup: _AppSignal = Signal(self) + self._cleanup_ctx = CleanupContext() + self._on_startup.append(self._cleanup_ctx._on_startup) + self._on_cleanup.append(self._cleanup_ctx._on_cleanup) + self._client_max_size = client_max_size + + def 
__init_subclass__(cls: Type["Application"]) -> None: + warnings.warn( + "Inheritance class {} from web.Application " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: # pragma: no cover + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom web.Application.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + # MutableMapping API + + def __eq__(self, other: object) -> bool: + return self is other + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def _check_frozen(self) -> None: + if self._frozen: + warnings.warn( + "Changing state of started or joined " "application is deprecated", + DeprecationWarning, + stacklevel=3, + ) + + def __setitem__(self, key: str, value: Any) -> None: + self._check_frozen() + self._state[key] = value + + def __delitem__(self, key: str) -> None: + self._check_frozen() + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + @property + def loop(self) -> asyncio.AbstractEventLoop: + # Technically the loop can be None + # but we mask it by explicit type cast + # to provide more convinient type annotation + warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) + return cast(asyncio.AbstractEventLoop, self._loop) + + def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: + if loop is None: + loop = asyncio.get_event_loop() + if self._loop is not None and self._loop is not loop: + raise RuntimeError( + "web.Application instance initialized with different loop" + ) + + self._loop = loop + + # set loop debug + if self._debug is ...: + self._debug = loop.get_debug() + + # set loop to sub applications + for subapp in self._subapps: + subapp._set_loop(loop) + + @property + def pre_frozen(self) -> bool: + return self._pre_frozen + + def pre_freeze(self) -> None: + if self._pre_frozen: + return + + self._pre_frozen = True + self._middlewares.freeze() + self._router.freeze() + self._on_response_prepare.freeze() + self._cleanup_ctx.freeze() + self._on_startup.freeze() + self._on_shutdown.freeze() + self._on_cleanup.freeze() + self._middlewares_handlers = tuple(self._prepare_middleware()) + + # If current app and any subapp do not have middlewares avoid run all + # of the code footprint that it implies, which have a middleware + # hardcoded per app that sets up the current_app attribute. If no + # middlewares are configured the handler will receive the proper + # current_app without needing all of this code. 
+ self._run_middlewares = True if self.middlewares else False + + for subapp in self._subapps: + subapp.pre_freeze() + self._run_middlewares = self._run_middlewares or subapp._run_middlewares + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + if self._frozen: + return + + self.pre_freeze() + self._frozen = True + for subapp in self._subapps: + subapp.freeze() + + @property + def debug(self) -> bool: + warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) + return self._debug # type: ignore[no-any-return] + + def _reg_subapp_signals(self, subapp: "Application") -> None: + def reg_handler(signame: str) -> None: + subsig = getattr(subapp, signame) + + async def handler(app: "Application") -> None: + await subsig.send(subapp) + + appsig = getattr(self, signame) + appsig.append(handler) + + reg_handler("on_startup") + reg_handler("on_shutdown") + reg_handler("on_cleanup") + + def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: + if not isinstance(prefix, str): + raise TypeError("Prefix must be str") + prefix = prefix.rstrip("/") + if not prefix: + raise ValueError("Prefix cannot be empty") + factory = partial(PrefixedSubAppResource, prefix, subapp) + return self._add_subapp(factory, subapp) + + def _add_subapp( + self, resource_factory: Callable[[], AbstractResource], subapp: "Application" + ) -> AbstractResource: + if self.frozen: + raise RuntimeError("Cannot add sub application to frozen application") + if subapp.frozen: + raise RuntimeError("Cannot add frozen application") + resource = resource_factory() + self.router.register_resource(resource) + self._reg_subapp_signals(subapp) + self._subapps.append(subapp) + subapp.pre_freeze() + if self._loop is not None: + subapp._set_loop(self._loop) + return resource + + def add_domain(self, domain: str, subapp: "Application") -> AbstractResource: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + elif "*" in domain: + rule: Domain = MaskDomain(domain) + else: + rule = Domain(domain) + factory = partial(MatchedSubAppResource, rule, subapp) + return self._add_subapp(factory, subapp) + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + return self.router.add_routes(routes) + + @property + def on_response_prepare(self) -> _RespPrepareSignal: + return self._on_response_prepare + + @property + def on_startup(self) -> _AppSignal: + return self._on_startup + + @property + def on_shutdown(self) -> _AppSignal: + return self._on_shutdown + + @property + def on_cleanup(self) -> _AppSignal: + return self._on_cleanup + + @property + def cleanup_ctx(self) -> "CleanupContext": + return self._cleanup_ctx + + @property + def router(self) -> UrlDispatcher: + return self._router + + @property + def middlewares(self) -> _Middlewares: + return self._middlewares + + def _make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + if not issubclass(access_log_class, AbstractAccessLogger): + raise TypeError( + "access_log_class must be subclass of " + "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) + ) + + self._set_loop(loop) + self.freeze() + + kwargs["debug"] = self._debug + kwargs["access_log_class"] = access_log_class + if self._handler_args: + for k, v in self._handler_args.items(): + kwargs[k] = v + + return Server( + self._handle, # type: ignore[arg-type] + 
request_factory=self._make_request, + loop=self._loop, + **kwargs, + ) + + def make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + warnings.warn( + "Application.make_handler(...) is deprecated, " "use AppRunner API instead", + DeprecationWarning, + stacklevel=2, + ) + + return self._make_handler( + loop=loop, access_log_class=access_log_class, **kwargs + ) + + async def startup(self) -> None: + """Causes on_startup signal + + Should be called in the event loop along with the request handler. + """ + await self.on_startup.send(self) + + async def shutdown(self) -> None: + """Causes on_shutdown signal + + Should be called before cleanup() + """ + await self.on_shutdown.send(self) + + async def cleanup(self) -> None: + """Causes on_cleanup signal + + Should be called after shutdown() + """ + if self.on_cleanup.frozen: + await self.on_cleanup.send(self) + else: + # If an exception occurs in startup, ensure cleanup contexts are completed. + await self._cleanup_ctx._on_cleanup(self) + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + _cls: Type[Request] = Request, + ) -> Request: + return _cls( + message, + payload, + protocol, + writer, + task, + self._loop, + client_max_size=self._client_max_size, + ) + + def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]: + for m in reversed(self._middlewares): + if getattr(m, "__middleware_version__", None) == 1: + yield m, True + else: + warnings.warn( + 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), + DeprecationWarning, + stacklevel=2, + ) + yield m, False + + yield _fix_request_current_app(self), True + + async def _handle(self, request: Request) -> StreamResponse: + loop = asyncio.get_event_loop() + debug = loop.get_debug() + match_info = await self._router.resolve(request) + if debug: # pragma: no cover + if not isinstance(match_info, AbstractMatchInfo): + raise TypeError( + "match_info should be AbstractMatchInfo " + "instance, not {!r}".format(match_info) + ) + match_info.add_app(self) + + match_info.freeze() + + resp = None + request._match_info = match_info + expect = request.headers.get(hdrs.EXPECT) + if expect: + resp = await match_info.expect_handler(request) + await request.writer.drain() + + if resp is None: + handler = match_info.handler + + if self._run_middlewares: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler + ) + else: + handler = await m(app, handler) # type: ignore[arg-type] + + resp = await handler(request) + + return resp + + def __call__(self) -> "Application": + """gunicorn compatibility""" + return self + + def __repr__(self) -> str: + return f"" + + def __bool__(self) -> bool: + return True + + +class CleanupError(RuntimeError): + @property + def exceptions(self) -> List[BaseException]: + return cast(List[BaseException], self.args[1]) + + +if TYPE_CHECKING: # pragma: no cover + _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] +else: + _CleanupContextBase = FrozenList + + +class CleanupContext(_CleanupContextBase): + def __init__(self) -> None: + super().__init__() + self._exits: List[AsyncIterator[None]] = [] + + async def _on_startup(self, app: Application) -> 
None: + for cb in self: + it = cb(app).__aiter__() + await it.__anext__() + self._exits.append(it) + + async def _on_cleanup(self, app: Application) -> None: + errors = [] + for it in reversed(self._exits): + try: + await it.__anext__() + except StopAsyncIteration: + pass + except Exception as exc: + errors.append(exc) + else: + errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) + if errors: + if len(errors) == 1: + raise errors[0] + else: + raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_exceptions.py b/venv/lib/python3.8/site-packages/aiohttp/web_exceptions.py new file mode 100644 index 0000000..ae706a1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_exceptions.py @@ -0,0 +1,441 @@ +import warnings +from typing import Any, Dict, Iterable, List, Optional, Set # noqa + +from yarl import URL + +from .typedefs import LooseHeaders, StrOrURL +from .web_response import Response + +__all__ = ( + "HTTPException", + "HTTPError", + "HTTPRedirection", + "HTTPSuccessful", + "HTTPOk", + "HTTPCreated", + "HTTPAccepted", + "HTTPNonAuthoritativeInformation", + "HTTPNoContent", + "HTTPResetContent", + "HTTPPartialContent", + "HTTPMultipleChoices", + "HTTPMovedPermanently", + "HTTPFound", + "HTTPSeeOther", + "HTTPNotModified", + "HTTPUseProxy", + "HTTPTemporaryRedirect", + "HTTPPermanentRedirect", + "HTTPClientError", + "HTTPBadRequest", + "HTTPUnauthorized", + "HTTPPaymentRequired", + "HTTPForbidden", + "HTTPNotFound", + "HTTPMethodNotAllowed", + "HTTPNotAcceptable", + "HTTPProxyAuthenticationRequired", + "HTTPRequestTimeout", + "HTTPConflict", + "HTTPGone", + "HTTPLengthRequired", + "HTTPPreconditionFailed", + "HTTPRequestEntityTooLarge", + "HTTPRequestURITooLong", + "HTTPUnsupportedMediaType", + "HTTPRequestRangeNotSatisfiable", + "HTTPExpectationFailed", + "HTTPMisdirectedRequest", + "HTTPUnprocessableEntity", + "HTTPFailedDependency", + "HTTPUpgradeRequired", + "HTTPPreconditionRequired", + "HTTPTooManyRequests", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPUnavailableForLegalReasons", + "HTTPServerError", + "HTTPInternalServerError", + "HTTPNotImplemented", + "HTTPBadGateway", + "HTTPServiceUnavailable", + "HTTPGatewayTimeout", + "HTTPVersionNotSupported", + "HTTPVariantAlsoNegotiates", + "HTTPInsufficientStorage", + "HTTPNotExtended", + "HTTPNetworkAuthenticationRequired", +) + + +############################################################ +# HTTP Exceptions +############################################################ + + +class HTTPException(Response, Exception): + + # You should set in subclasses: + # status = 200 + + status_code = -1 + empty_body = False + + __http_exception__ = True + + def __init__( + self, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if body is not None: + warnings.warn( + "body argument is deprecated for http web exceptions", + DeprecationWarning, + ) + Response.__init__( + self, + status=self.status_code, + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + Exception.__init__(self, self.reason) + if self.body is None and not self.empty_body: + self.text = f"{self.status}: {self.reason}" + + def __bool__(self) -> bool: + return True + + +class HTTPError(HTTPException): + """Base class for exceptions with status codes in the 400s and 500s.""" + + +class HTTPRedirection(HTTPException): + """Base 
class for exceptions with status codes in the 300s.""" + + +class HTTPSuccessful(HTTPException): + """Base class for exceptions with status codes in the 200s.""" + + +class HTTPOk(HTTPSuccessful): + status_code = 200 + + +class HTTPCreated(HTTPSuccessful): + status_code = 201 + + +class HTTPAccepted(HTTPSuccessful): + status_code = 202 + + +class HTTPNonAuthoritativeInformation(HTTPSuccessful): + status_code = 203 + + +class HTTPNoContent(HTTPSuccessful): + status_code = 204 + empty_body = True + + +class HTTPResetContent(HTTPSuccessful): + status_code = 205 + empty_body = True + + +class HTTPPartialContent(HTTPSuccessful): + status_code = 206 + + +############################################################ +# 3xx redirection +############################################################ + + +class _HTTPMove(HTTPRedirection): + def __init__( + self, + location: StrOrURL, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if not location: + raise ValueError("HTTP redirects need a location to redirect to.") + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Location"] = str(URL(location)) + self.location = location + + +class HTTPMultipleChoices(_HTTPMove): + status_code = 300 + + +class HTTPMovedPermanently(_HTTPMove): + status_code = 301 + + +class HTTPFound(_HTTPMove): + status_code = 302 + + +# This one is safe after a POST (the redirected location will be +# retrieved with GET): +class HTTPSeeOther(_HTTPMove): + status_code = 303 + + +class HTTPNotModified(HTTPRedirection): + # FIXME: this should include a date or etag header + status_code = 304 + empty_body = True + + +class HTTPUseProxy(_HTTPMove): + # Not a move, but looks a little like one + status_code = 305 + + +class HTTPTemporaryRedirect(_HTTPMove): + status_code = 307 + + +class HTTPPermanentRedirect(_HTTPMove): + status_code = 308 + + +############################################################ +# 4xx client error +############################################################ + + +class HTTPClientError(HTTPError): + pass + + +class HTTPBadRequest(HTTPClientError): + status_code = 400 + + +class HTTPUnauthorized(HTTPClientError): + status_code = 401 + + +class HTTPPaymentRequired(HTTPClientError): + status_code = 402 + + +class HTTPForbidden(HTTPClientError): + status_code = 403 + + +class HTTPNotFound(HTTPClientError): + status_code = 404 + + +class HTTPMethodNotAllowed(HTTPClientError): + status_code = 405 + + def __init__( + self, + method: str, + allowed_methods: Iterable[str], + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + allow = ",".join(sorted(allowed_methods)) + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Allow"] = allow + self.allowed_methods: Set[str] = set(allowed_methods) + self.method = method.upper() + + +class HTTPNotAcceptable(HTTPClientError): + status_code = 406 + + +class HTTPProxyAuthenticationRequired(HTTPClientError): + status_code = 407 + + +class HTTPRequestTimeout(HTTPClientError): + status_code = 408 + + +class HTTPConflict(HTTPClientError): + status_code = 409 + + +class HTTPGone(HTTPClientError): + status_code = 410 + + +class HTTPLengthRequired(HTTPClientError): + status_code 
= 411 + + +class HTTPPreconditionFailed(HTTPClientError): + status_code = 412 + + +class HTTPRequestEntityTooLarge(HTTPClientError): + status_code = 413 + + def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: + kwargs.setdefault( + "text", + "Maximum request body size {} exceeded, " + "actual body size {}".format(max_size, actual_size), + ) + super().__init__(**kwargs) + + +class HTTPRequestURITooLong(HTTPClientError): + status_code = 414 + + +class HTTPUnsupportedMediaType(HTTPClientError): + status_code = 415 + + +class HTTPRequestRangeNotSatisfiable(HTTPClientError): + status_code = 416 + + +class HTTPExpectationFailed(HTTPClientError): + status_code = 417 + + +class HTTPMisdirectedRequest(HTTPClientError): + status_code = 421 + + +class HTTPUnprocessableEntity(HTTPClientError): + status_code = 422 + + +class HTTPFailedDependency(HTTPClientError): + status_code = 424 + + +class HTTPUpgradeRequired(HTTPClientError): + status_code = 426 + + +class HTTPPreconditionRequired(HTTPClientError): + status_code = 428 + + +class HTTPTooManyRequests(HTTPClientError): + status_code = 429 + + +class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): + status_code = 431 + + +class HTTPUnavailableForLegalReasons(HTTPClientError): + status_code = 451 + + def __init__( + self, + link: str, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Link"] = '<%s>; rel="blocked-by"' % link + self.link = link + + +############################################################ +# 5xx Server Error +############################################################ +# Response status codes beginning with the digit "5" indicate cases in +# which the server is aware that it has erred or is incapable of +# performing the request. Except when responding to a HEAD request, the +# server SHOULD include an entity containing an explanation of the error +# situation, and whether it is a temporary or permanent condition. User +# agents SHOULD display any included entity to the user. These response +# codes are applicable to any request method. 
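# Illustrative sketch, not part of web_exceptions.py: these classes are both
# exceptions and Response objects, so a handler raises them directly; returning
# them is the deprecated pattern flagged in web_protocol.py (#2415). The route is
# assumed to declare an {id} match parameter.
from aiohttp import web

async def get_item(request: web.Request) -> web.Response:
    item_id = request.match_info.get("id")   # assumed route parameter
    if not item_id:
        raise web.HTTPBadRequest(text="missing id")
    if item_id != "42":                       # stand-in for a real lookup
        raise web.HTTPNotFound(text=f"item {item_id} not found")
    return web.Response(text="found item 42")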
+ + +class HTTPServerError(HTTPError): + pass + + +class HTTPInternalServerError(HTTPServerError): + status_code = 500 + + +class HTTPNotImplemented(HTTPServerError): + status_code = 501 + + +class HTTPBadGateway(HTTPServerError): + status_code = 502 + + +class HTTPServiceUnavailable(HTTPServerError): + status_code = 503 + + +class HTTPGatewayTimeout(HTTPServerError): + status_code = 504 + + +class HTTPVersionNotSupported(HTTPServerError): + status_code = 505 + + +class HTTPVariantAlsoNegotiates(HTTPServerError): + status_code = 506 + + +class HTTPInsufficientStorage(HTTPServerError): + status_code = 507 + + +class HTTPNotExtended(HTTPServerError): + status_code = 510 + + +class HTTPNetworkAuthenticationRequired(HTTPServerError): + status_code = 511 diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_fileresponse.py b/venv/lib/python3.8/site-packages/aiohttp/web_fileresponse.py new file mode 120000 index 0000000..c0ba72e --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_fileresponse.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/17/fc/51/bd814bda8c78a15356dd64a3c10e8b995a6491833c30fc6caa236c7d48 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_log.py b/venv/lib/python3.8/site-packages/aiohttp/web_log.py new file mode 100644 index 0000000..bc6e3b5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_log.py @@ -0,0 +1,208 @@ +import datetime +import functools +import logging +import os +import re +from collections import namedtuple +from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa + +from .abc import AbstractAccessLogger +from .web_request import BaseRequest +from .web_response import StreamResponse + +KeyMethod = namedtuple("KeyMethod", "key method") + + +class AccessLogger(AbstractAccessLogger): + """Helper object to log access. + + Usage: + log = logging.getLogger("spam") + log_format = "%a %{User-Agent}i" + access_logger = AccessLogger(log, log_format) + access_logger.log(request, response, time) + + Format: + %% The percent sign + %a Remote IP-address (IP-address of proxy if using reverse proxy) + %t Time when the request was started to process + %P The process ID of the child that serviced the request + %r First line of request + %s Response status code + %b Size of response in bytes, including HTTP headers + %T Time taken to serve the request, in seconds + %Tf Time taken to serve the request, in seconds with floating fraction + in .06f format + %D Time taken to serve the request, in microseconds + %{FOO}i request.headers['FOO'] + %{FOO}o response.headers['FOO'] + %{FOO}e os.environ['FOO'] + + """ + + LOG_FORMAT_MAP = { + "a": "remote_address", + "t": "request_start_time", + "P": "process_id", + "r": "first_request_line", + "s": "response_status", + "b": "response_size", + "T": "request_time", + "Tf": "request_time_frac", + "D": "request_time_micro", + "i": "request_header", + "o": "response_header", + } + + LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' + FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") + CLEANUP_RE = re.compile(r"(%[^s])") + _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} + + def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: + """Initialise the logger. + + logger is a logger object to be used for logging. + log_format is a string with apache compatible log format description. 
+ + """ + super().__init__(logger, log_format=log_format) + + _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) + if not _compiled_format: + _compiled_format = self.compile_format(log_format) + AccessLogger._FORMAT_CACHE[log_format] = _compiled_format + + self._log_format, self._methods = _compiled_format + + def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: + """Translate log_format into form usable by modulo formatting + + All known atoms will be replaced with %s + Also methods for formatting of those atoms will be added to + _methods in appropriate order + + For example we have log_format = "%a %t" + This format will be translated to "%s %s" + Also contents of _methods will be + [self._format_a, self._format_t] + These method will be called and results will be passed + to translated string format. + + Each _format_* method receive 'args' which is list of arguments + given to self.log + + Exceptions are _format_e, _format_i and _format_o methods which + also receive key name (by functools.partial) + + """ + # list of (key, method) tuples, we don't use an OrderedDict as users + # can repeat the same key more than once + methods = list() + + for atom in self.FORMAT_RE.findall(log_format): + if atom[1] == "": + format_key1 = self.LOG_FORMAT_MAP[atom[0]] + m = getattr(AccessLogger, "_format_%s" % atom[0]) + key_method = KeyMethod(format_key1, m) + else: + format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) + m = getattr(AccessLogger, "_format_%s" % atom[2]) + key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) + + methods.append(key_method) + + log_format = self.FORMAT_RE.sub(r"%s", log_format) + log_format = self.CLEANUP_RE.sub(r"%\1", log_format) + return log_format, methods + + @staticmethod + def _format_i( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + if request is None: + return "(no headers)" + + # suboptimal, make istr(key) once + return request.headers.get(key, "-") + + @staticmethod + def _format_o( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + # suboptimal, make istr(key) once + return response.headers.get(key, "-") + + @staticmethod + def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + ip = request.remote + return ip if ip is not None else "-" + + @staticmethod + def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: + now = datetime.datetime.utcnow() + start_time = now - datetime.timedelta(seconds=time) + return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]") + + @staticmethod + def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "<%s>" % os.getpid() + + @staticmethod + def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + return "{} {} HTTP/{}.{}".format( + request.method, + request.path_qs, + request.version.major, + request.version.minor, + ) + + @staticmethod + def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.status + + @staticmethod + def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.body_length + + @staticmethod + def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time)) + + @staticmethod + def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "%06f" % 
time + + @staticmethod + def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time * 1000000)) + + def _format_line( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: + return [(key, method(request, response, time)) for key, method in self._methods] + + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + try: + fmt_info = self._format_line(request, response, time) + + values = list() + extra = dict() + for key, value in fmt_info: + values.append(value) + + if key.__class__ is str: + extra[key] = value + else: + k1, k2 = key # type: ignore[misc] + dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] + dct[k2] = value # type: ignore[index,has-type] + extra[k1] = dct # type: ignore[has-type,assignment] + + self.logger.info(self._log_format % tuple(values), extra=extra) + except Exception: + self.logger.exception("Error in logging") diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_middlewares.py b/venv/lib/python3.8/site-packages/aiohttp/web_middlewares.py new file mode 120000 index 0000000..7c1eccd --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_middlewares.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9e/79/65/160c57f468ef906dd85b8de33c31fb0b4de209fc8c071858c3e3a44c8d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_protocol.py b/venv/lib/python3.8/site-packages/aiohttp/web_protocol.py new file mode 100644 index 0000000..10a9608 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_protocol.py @@ -0,0 +1,679 @@ +import asyncio +import asyncio.streams +import traceback +import warnings +from collections import deque +from contextlib import suppress +from html import escape as html_escape +from http import HTTPStatus +from logging import Logger +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Deque, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import attr +import yarl + +from .abc import AbstractAccessLogger, AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import ceil_timeout +from .http import ( + HttpProcessingError, + HttpRequestParser, + HttpVersion10, + RawRequestMessage, + StreamWriter, +) +from .log import access_logger, server_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .tcp_helpers import tcp_keepalive +from .web_exceptions import HTTPException +from .web_log import AccessLogger +from .web_request import BaseRequest +from .web_response import Response, StreamResponse + +__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") + +if TYPE_CHECKING: # pragma: no cover + from .web_server import Server + + +_RequestFactory = Callable[ + [ + RawRequestMessage, + StreamReader, + "RequestHandler", + AbstractStreamWriter, + "asyncio.Task[None]", + ], + BaseRequest, +] + +_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] + +ERROR = RawRequestMessage( + "UNKNOWN", + "/", + HttpVersion10, + {}, # type: ignore[arg-type] + {}, # type: ignore[arg-type] + True, + None, + False, + False, + yarl.URL("/"), +) + + +class RequestPayloadError(Exception): + """Payload parsing error.""" + + +class PayloadAccessError(Exception): + """Payload was accessed after response was sent.""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class _ErrInfo: + status: int + exc: BaseException + message: str + + +_MsgType = 
Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] + + +class RequestHandler(BaseProtocol): + """HTTP protocol implementation. + + RequestHandler handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. + + RequestHandler handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + keepalive_timeout -- number of seconds before closing + keep-alive connection + + tcp_keepalive -- TCP keep-alive is on, default is on + + debug -- enable debug mode + + logger -- custom logger object + + access_log_class -- custom class for access_logger + + access_log -- custom logging object + + access_log_format -- access log format string + + loop -- Optional event loop + + max_line_size -- Optional maximum header line size + + max_field_size -- Optional maximum header field size + + max_headers -- Optional maximum header size + + """ + + KEEPALIVE_RESCHEDULE_DELAY = 1 + + __slots__ = ( + "_request_count", + "_keepalive", + "_manager", + "_request_handler", + "_request_factory", + "_tcp_keepalive", + "_keepalive_time", + "_keepalive_handle", + "_keepalive_timeout", + "_lingering_time", + "_messages", + "_message_tail", + "_waiter", + "_task_handler", + "_upgrade", + "_payload_parser", + "_request_parser", + "_reading_paused", + "logger", + "debug", + "access_log", + "access_logger", + "_close", + "_force_close", + "_current_request", + ) + + def __init__( + self, + manager: "Server", + *, + loop: asyncio.AbstractEventLoop, + keepalive_timeout: float = 75.0, # NGINX default is 75 secs + tcp_keepalive: bool = True, + logger: Logger = server_logger, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log: Logger = access_logger, + access_log_format: str = AccessLogger.LOG_FORMAT, + debug: bool = False, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lingering_time: float = 10.0, + read_bufsize: int = 2**16, + auto_decompress: bool = True, + ): + super().__init__(loop) + + self._request_count = 0 + self._keepalive = False + self._current_request: Optional[BaseRequest] = None + self._manager: Optional[Server] = manager + self._request_handler: Optional[_RequestHandler] = manager.request_handler + self._request_factory: Optional[_RequestFactory] = manager.request_factory + + self._tcp_keepalive = tcp_keepalive + # placeholder to be replaced on keepalive timeout setup + self._keepalive_time = 0.0 + self._keepalive_handle: Optional[asyncio.Handle] = None + self._keepalive_timeout = keepalive_timeout + self._lingering_time = float(lingering_time) + + self._messages: Deque[_MsgType] = deque() + self._message_tail = b"" + + self._waiter: Optional[asyncio.Future[None]] = None + self._task_handler: Optional[asyncio.Task[None]] = None + + self._upgrade = False + self._payload_parser: Any = None + self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( + self, + loop, + read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers, + payload_exception=RequestPayloadError, + auto_decompress=auto_decompress, + ) + + self.logger = logger + self.debug = debug + self.access_log = access_log + if access_log: + self.access_logger: Optional[AbstractAccessLogger] = access_log_class( + access_log, access_log_format + ) + else: + self.access_logger = None + + self._close = False + self._force_close = False + + 
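# Illustrative sketch, not part of web_protocol.py: the access_log_format accepted
# by RequestHandler above is interpreted by web_log.AccessLogger; its atoms
# (%a, %r, %s, %b, %Tf, %{Name}i, ...) are listed in AccessLogger.LOG_FORMAT_MAP.
# A typical way to supply a custom format is via web.run_app:
from aiohttp import web

app = web.Application()
# remote address, request line, status, body size, response time, User-Agent header
web.run_app(app, access_log_format='%a "%r" %s %b %Tf "%{User-Agent}i"')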
def __repr__(self) -> str: + return "<{} {}>".format( + self.__class__.__name__, + "connected" if self.transport is not None else "disconnected", + ) + + @property + def keepalive_timeout(self) -> float: + return self._keepalive_timeout + + async def shutdown(self, timeout: Optional[float] = 15.0) -> None: + """Do worker process exit preparations. + + We need to clean up everything and stop accepting requests. + It is especially important for keep-alive connections. + """ + self._force_close = True + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._waiter: + self._waiter.cancel() + + # wait for handlers + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + async with ceil_timeout(timeout): + if self._current_request is not None: + self._current_request._cancel(asyncio.CancelledError()) + + if self._task_handler is not None and not self._task_handler.done(): + await self._task_handler + + # force-close non-idle handler + if self._task_handler is not None: + self._task_handler.cancel() + + if self.transport is not None: + self.transport.close() + self.transport = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + super().connection_made(transport) + + real_transport = cast(asyncio.Transport, transport) + if self._tcp_keepalive: + tcp_keepalive(real_transport) + + self._task_handler = self._loop.create_task(self.start()) + assert self._manager is not None + self._manager.connection_made(self, real_transport) + + def connection_lost(self, exc: Optional[BaseException]) -> None: + if self._manager is None: + return + self._manager.connection_lost(self, exc) + + super().connection_lost(exc) + + self._manager = None + self._force_close = True + self._request_factory = None + self._request_handler = None + self._request_parser = None + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._current_request is not None: + if exc is None: + exc = ConnectionResetError("Connection lost") + self._current_request._cancel(exc) + + if self._waiter is not None: + self._waiter.cancel() + + self._task_handler = None + + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + + def set_parser(self, parser: Any) -> None: + # Actual type is WebReader + assert self._payload_parser is None + + self._payload_parser = parser + + if self._message_tail: + self._payload_parser.feed_data(self._message_tail) + self._message_tail = b"" + + def eof_received(self) -> None: + pass + + def data_received(self, data: bytes) -> None: + if self._force_close or self._close: + return + # parse http messages + messages: Sequence[_MsgType] + if self._payload_parser is None and not self._upgrade: + assert self._request_parser is not None + try: + messages, upgraded, tail = self._request_parser.feed_data(data) + except HttpProcessingError as exc: + messages = [ + (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) + ] + upgraded = False + tail = b"" + + for msg, payload in messages or (): + self._request_count += 1 + self._messages.append((msg, payload)) + + waiter = self._waiter + if messages and waiter is not None and not waiter.done(): + # don't set result twice + waiter.set_result(None) + + self._upgrade = upgraded + if upgraded and tail: + self._message_tail = tail + + # no parser, just store + elif self._payload_parser is None and self._upgrade and data: + self._message_tail += data + + # feed payload + elif data: + eof, tail = 
self._payload_parser.feed_data(data) + if eof: + self.close() + + def keep_alive(self, val: bool) -> None: + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + if self._keepalive_handle: + self._keepalive_handle.cancel() + self._keepalive_handle = None + + def close(self) -> None: + """Close connection. + + Stop accepting new pipelining messages and close + connection when handlers done processing messages. + """ + self._close = True + if self._waiter: + self._waiter.cancel() + + def force_close(self) -> None: + """Forcefully close connection.""" + self._force_close = True + if self._waiter: + self._waiter.cancel() + if self.transport is not None: + self.transport.close() + self.transport = None + + def log_access( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> None: + if self.access_logger is not None: + self.access_logger.log(request, response, self._loop.time() - time) + + def log_debug(self, *args: Any, **kw: Any) -> None: + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args: Any, **kw: Any) -> None: + self.logger.exception(*args, **kw) + + def _process_keepalive(self) -> None: + if self._force_close or not self._keepalive: + return + + next = self._keepalive_time + self._keepalive_timeout + + # handler in idle state + if self._waiter: + if self._loop.time() > next: + self.force_close() + return + + # not all request handlers are done, + # reschedule itself to next second + self._keepalive_handle = self._loop.call_later( + self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive + ) + + async def _handle_request( + self, + request: BaseRequest, + start_time: float, + request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], + ) -> Tuple[StreamResponse, bool]: + assert self._request_handler is not None + try: + try: + self._current_request = request + resp = await request_handler(request) + finally: + self._current_request = None + except HTTPException as exc: + resp = exc + reset = await self.finish_response(request, resp, start_time) + except asyncio.CancelledError: + raise + except asyncio.TimeoutError as exc: + self.log_debug("Request handler timed out.", exc_info=exc) + resp = self.handle_error(request, 504) + reset = await self.finish_response(request, resp, start_time) + except Exception as exc: + resp = self.handle_error(request, 500, exc) + reset = await self.finish_response(request, resp, start_time) + else: + # Deprecation warning (See #2415) + if getattr(resp, "__http_exception__", False): + warnings.warn( + "returning HTTPException object is deprecated " + "(#2415) and will be removed, " + "please raise the exception instead", + DeprecationWarning, + ) + + reset = await self.finish_response(request, resp, start_time) + + return resp, reset + + async def start(self) -> None: + """Process incoming request. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. 
+ """ + loop = self._loop + handler = self._task_handler + assert handler is not None + manager = self._manager + assert manager is not None + keepalive_timeout = self._keepalive_timeout + resp = None + assert self._request_factory is not None + assert self._request_handler is not None + + while not self._force_close: + if not self._messages: + try: + # wait for next request + self._waiter = loop.create_future() + await self._waiter + except asyncio.CancelledError: + break + finally: + self._waiter = None + + message, payload = self._messages.popleft() + + start = loop.time() + + manager.requests_count += 1 + writer = StreamWriter(self, loop) + if isinstance(message, _ErrInfo): + # make request_factory work + request_handler = self._make_error_handler(message) + message = ERROR + else: + request_handler = self._request_handler + + request = self._request_factory(message, payload, self, writer, handler) + try: + # a new task is used for copy context vars (#3406) + task = self._loop.create_task( + self._handle_request(request, start, request_handler) + ) + try: + resp, reset = await task + except (asyncio.CancelledError, ConnectionError): + self.log_debug("Ignored premature client disconnection") + break + + # Drop the processed task from asyncio.Task.all_tasks() early + del task + if reset: + self.log_debug("Ignored premature client disconnection 2") + break + + # notify server about keep-alive + self._keepalive = bool(resp.keep_alive) + + # check payload + if not payload.is_eof(): + lingering_time = self._lingering_time + if not self._force_close and lingering_time: + self.log_debug( + "Start lingering close timer for %s sec.", lingering_time + ) + + now = loop.time() + end_t = now + lingering_time + + with suppress(asyncio.TimeoutError, asyncio.CancelledError): + while not payload.is_eof() and now < end_t: + async with ceil_timeout(end_t - now): + # read and ignore + await payload.readany() + now = loop.time() + + # if payload still uncompleted + if not payload.is_eof() and not self._force_close: + self.log_debug("Uncompleted request.") + self.close() + + payload.set_exception(PayloadAccessError()) + + except asyncio.CancelledError: + self.log_debug("Ignored premature client disconnection ") + break + except RuntimeError as exc: + if self.debug: + self.log_exception("Unhandled runtime exception", exc_info=exc) + self.force_close() + except Exception as exc: + self.log_exception("Unhandled exception", exc_info=exc) + self.force_close() + finally: + if self.transport is None and resp is not None: + self.log_debug("Ignored premature client disconnection.") + elif not self._force_close: + if self._keepalive and not self._close: + # start keep-alive timer + if keepalive_timeout is not None: + now = self._loop.time() + self._keepalive_time = now + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + now + keepalive_timeout, self._process_keepalive + ) + else: + break + + # remove handler, close transport if no handlers left + if not self._force_close: + self._task_handler = None + if self.transport is not None: + self.transport.close() + + async def finish_response( + self, request: BaseRequest, resp: StreamResponse, start_time: float + ) -> bool: + """Prepare the response and write_eof, then log access. + + This has to + be called within the context of any exception so the access logger + can get exception information. Returns True if the client disconnects + prematurely. 
+ """ + if self._request_parser is not None: + self._request_parser.set_upgraded(False) + self._upgrade = False + if self._message_tail: + self._request_parser.feed_data(self._message_tail) + self._message_tail = b"" + try: + prepare_meth = resp.prepare + except AttributeError: + if resp is None: + raise RuntimeError("Missing return " "statement on request handler") + else: + raise RuntimeError( + "Web-handler should return " + "a response instance, " + "got {!r}".format(resp) + ) + try: + await prepare_meth(request) + await resp.write_eof() + except ConnectionError: + self.log_access(request, resp, start_time) + return True + else: + self.log_access(request, resp, start_time) + return False + + def handle_error( + self, + request: BaseRequest, + status: int = 500, + exc: Optional[BaseException] = None, + message: Optional[str] = None, + ) -> StreamResponse: + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection. + """ + self.log_exception("Error handling request", exc_info=exc) + + # some data already got sent, connection is broken + if request.writer.output_size > 0: + raise ConnectionError( + "Response is sent already, cannot send another response " + "with the error message" + ) + + ct = "text/plain" + if status == HTTPStatus.INTERNAL_SERVER_ERROR: + title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) + msg = HTTPStatus.INTERNAL_SERVER_ERROR.description + tb = None + if self.debug: + with suppress(Exception): + tb = traceback.format_exc() + + if "text/html" in request.headers.get("Accept", ""): + if tb: + tb = html_escape(tb) + msg = f"

<h2>Traceback:</h2>\n<pre>{tb}</pre>" + message = ( + "<html><head>" + "<title>{title}</title>" + "</head><body>\n<h1>{title}</h1>
" + "\n{msg}\n\n" + ).format(title=title, msg=msg) + ct = "text/html" + else: + if tb: + msg = tb + message = title + "\n\n" + msg + + resp = Response(status=status, text=message, content_type=ct) + resp.force_close() + + return resp + + def _make_error_handler( + self, err_info: _ErrInfo + ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: + async def handler(request: BaseRequest) -> StreamResponse: + return self.handle_error( + request, err_info.status, err_info.exc, err_info.message + ) + + return handler diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_request.py b/venv/lib/python3.8/site-packages/aiohttp/web_request.py new file mode 100644 index 0000000..c02ebfc --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_request.py @@ -0,0 +1,882 @@ +import asyncio +import datetime +import io +import re +import socket +import string +import tempfile +import types +import warnings +from http.cookies import SimpleCookie +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Pattern, + Tuple, + Union, + cast, +) +from urllib.parse import parse_qsl + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + DEBUG, + ETAG_ANY, + LIST_QUOTED_ETAG_RE, + ChainMapProxy, + ETag, + HeadersMixin, + parse_http_date, + reify, + sentinel, +) +from .http_parser import RawRequestMessage +from .http_writer import HttpVersion +from .multipart import BodyPartReader, MultipartReader +from .streams import EmptyStreamReader, StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + Final, + JSONDecoder, + LooseHeaders, + RawHeaders, + StrOrURL, +) +from .web_exceptions import HTTPRequestEntityTooLarge +from .web_response import StreamResponse + +__all__ = ("BaseRequest", "FileField", "Request") + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + from .web_protocol import RequestHandler + from .web_urldispatcher import UrlMappingMatchInfo + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class FileField: + name: str + filename: str + file: io.BufferedReader + content_type: str + headers: "CIMultiDictProxy[str]" + + +_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +# '-' at the end to prevent interpretation as range in a char class + +_TOKEN: Final[str] = rf"[{_TCHAR}]+" + +_QDTEXT: Final[str] = r"[{}]".format( + r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) +) +# qdtext includes 0x5C to escape 0x5D ('\]') +# qdtext excludes obs-text (because obsoleted, and encoding not specified) + +_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" + +_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( + qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR +) + +_FORWARDED_PAIR: Final[ + str +] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING +) + +_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") +# same pattern as _QUOTED_PAIR but contains a capture group + +_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) + +############################################################ +# HTTP Request +############################################################ + + +class BaseRequest(MutableMapping[str, Any], HeadersMixin): + + POST_METHODS = { + hdrs.METH_PATCH, + hdrs.METH_POST, + 
hdrs.METH_PUT, + hdrs.METH_TRACE, + hdrs.METH_DELETE, + } + + ATTRS = HeadersMixin.ATTRS | frozenset( + [ + "_message", + "_protocol", + "_payload_writer", + "_payload", + "_headers", + "_method", + "_version", + "_rel_url", + "_post", + "_read_bytes", + "_state", + "_cache", + "_task", + "_client_max_size", + "_loop", + "_transport_sslcontext", + "_transport_peername", + ] + ) + + def __init__( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: "RequestHandler", + payload_writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + loop: asyncio.AbstractEventLoop, + *, + client_max_size: int = 1024**2, + state: Optional[Dict[str, Any]] = None, + scheme: Optional[str] = None, + host: Optional[str] = None, + remote: Optional[str] = None, + ) -> None: + if state is None: + state = {} + self._message = message + self._protocol = protocol + self._payload_writer = payload_writer + + self._payload = payload + self._headers = message.headers + self._method = message.method + self._version = message.version + self._cache: Dict[str, Any] = {} + url = message.url + if url.is_absolute(): + # absolute URL is given, + # override auto-calculating url, host, and scheme + # all other properties should be good + self._cache["url"] = url + self._cache["host"] = url.host + self._cache["scheme"] = url.scheme + self._rel_url = url.relative() + else: + self._rel_url = message.url + self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + self._read_bytes: Optional[bytes] = None + + self._state = state + self._task = task + self._client_max_size = client_max_size + self._loop = loop + + transport = self._protocol.transport + assert transport is not None + self._transport_sslcontext = transport.get_extra_info("sslcontext") + self._transport_peername = transport.get_extra_info("peername") + + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host + if remote is not None: + self._cache["remote"] = remote + + def clone( + self, + *, + method: str = sentinel, + rel_url: StrOrURL = sentinel, + headers: LooseHeaders = sentinel, + scheme: str = sentinel, + host: str = sentinel, + remote: str = sentinel, + ) -> "BaseRequest": + """Clone itself with replacement some attributes. + + Creates and returns a new instance of Request object. If no parameters + are given, an exact copy is returned. If a parameter is not passed, it + will reuse the one from the current request object. 
+ """ + if self._read_bytes: + raise RuntimeError("Cannot clone request " "after reading its content") + + dct: Dict[str, Any] = {} + if method is not sentinel: + dct["method"] = method + if rel_url is not sentinel: + new_url = URL(rel_url) + dct["url"] = new_url + dct["path"] = str(new_url) + if headers is not sentinel: + # a copy semantic + dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) + dct["raw_headers"] = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + + message = self._message._replace(**dct) + + kwargs = {} + if scheme is not sentinel: + kwargs["scheme"] = scheme + if host is not sentinel: + kwargs["host"] = host + if remote is not sentinel: + kwargs["remote"] = remote + + return self.__class__( + message, + self._payload, + self._protocol, + self._payload_writer, + self._task, + self._loop, + client_max_size=self._client_max_size, + state=self._state.copy(), + **kwargs, + ) + + @property + def task(self) -> "asyncio.Task[None]": + return self._task + + @property + def protocol(self) -> "RequestHandler": + return self._protocol + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def writer(self) -> AbstractStreamWriter: + return self._payload_writer + + @reify + def message(self) -> RawRequestMessage: + warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) + return self._message + + @reify + def rel_url(self) -> URL: + return self._rel_url + + @reify + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "request.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + # MutableMapping API + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + + @reify + def secure(self) -> bool: + """A bool indicating if the request is handled with SSL.""" + return self.scheme == "https" + + @reify + def forwarded(self) -> Tuple[Mapping[str, str], ...]: + """A tuple containing all parsed Forwarded header(s). + + Makes an effort to parse Forwarded headers as specified by RFC 7239: + + - It adds one (immutable) dictionary per Forwarded 'field-value', ie + per proxy. The element corresponds to the data in the Forwarded + field-value added by the first proxy encountered by the client. Each + subsequent item corresponds to those added by later proxies. + - It checks that every value has valid syntax in general as specified + in section 4: either a 'token' or a 'quoted-string'. + - It un-escapes found escape sequences. + - It does NOT validate 'by' and 'for' contents as specified in section + 6. + - It does NOT validate 'host' contents (Host ABNF). + - It does NOT validate 'proto' contents for valid URI scheme names. 
+ + Returns a tuple containing one or more immutable dicts + """ + elems = [] + for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): + length = len(field_value) + pos = 0 + need_separator = False + elem: Dict[str, str] = {} + elems.append(types.MappingProxyType(elem)) + while 0 <= pos < length: + match = _FORWARDED_PAIR_RE.match(field_value, pos) + if match is not None: # got a valid forwarded-pair + if need_separator: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + else: + name, value, port = match.groups() + if value[0] == '"': + # quoted string: remove quotes and unescape + value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) + if port: + value += port + elem[name.lower()] = value + pos += len(match.group(0)) + need_separator = True + elif field_value[pos] == ",": # next forwarded-element + need_separator = False + elem = {} + elems.append(types.MappingProxyType(elem)) + pos += 1 + elif field_value[pos] == ";": # next forwarded-pair + need_separator = False + pos += 1 + elif field_value[pos] in " \t": + # Allow whitespace even between forwarded-pairs, though + # RFC 7239 doesn't. This simplifies code and is in line + # with Postel's law. + pos += 1 + else: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + return tuple(elems) + + @reify + def scheme(self) -> str: + """A string representing the scheme of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(scheme=new_scheme) call. + - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. + + 'http' or 'https'. + """ + if self._transport_sslcontext: + return "https" + else: + return "http" + + @reify + def method(self) -> str: + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._method + + @reify + def version(self) -> HttpVersion: + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._version + + @reify + def host(self) -> str: + """Hostname of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(host=new_host) call. + - HOST HTTP header + - socket.getfqdn() value + """ + host = self._message.headers.get(hdrs.HOST) + if host is not None: + return host + return socket.getfqdn() + + @reify + def remote(self) -> Optional[str]: + """Remote IP of client initiated HTTP request. + + The IP is resolved in this order: + + - overridden value by .clone(remote=new_remote) call. + - peername of opened socket + """ + if self._transport_peername is None: + return None + if isinstance(self._transport_peername, (list, tuple)): + return str(self._transport_peername[0]) + return str(self._transport_peername) + + @reify + def url(self) -> URL: + url = URL.build(scheme=self.scheme, host=self.host) + return url.join(self._rel_url) + + @reify + def path(self) -> str: + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return self._rel_url.path + + @reify + def path_qs(self) -> str: + """The URL including PATH_INFO and the query string. + + E.g, /app/blog?id=10 + """ + return str(self._rel_url) + + @reify + def raw_path(self) -> str: + """The URL including raw *PATH INFO* without the host or scheme. 
+ + Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._message.path + + @reify + def query(self) -> "MultiDictProxy[str]": + """A multidict with all the variables in the query string.""" + return MultiDictProxy(self._rel_url.query) + + @reify + def query_string(self) -> str: + """The query string in the URL. + + E.g., id=10 + """ + return self._rel_url.query_string + + @reify + def headers(self) -> "CIMultiDictProxy[str]": + """A case-insensitive multidict proxy with all headers.""" + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + """A sequence of pairs for all headers.""" + return self._message.raw_headers + + @reify + def if_modified_since(self) -> Optional[datetime.datetime]: + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) + + @reify + def if_unmodified_since(self) -> Optional[datetime.datetime]: + """The value of If-Unmodified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + + @staticmethod + def _etag_values(etag_header: str) -> Iterator[ETag]: + """Extract `ETag` objects from raw header.""" + if etag_header == ETAG_ANY: + yield ETag( + is_weak=False, + value=ETAG_ANY, + ) + else: + for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): + is_weak, value, garbage = match.group(2, 3, 4) + # Any symbol captured by 4th group means + # that the following sequence is invalid. + if garbage: + break + + yield ETag( + is_weak=bool(is_weak), + value=value, + ) + + @classmethod + def _if_match_or_none_impl( + cls, header_value: Optional[str] + ) -> Optional[Tuple[ETag, ...]]: + if not header_value: + return None + + return tuple(cls._etag_values(header_value)) + + @reify + def if_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) + + @reify + def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-None-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) + + @reify + def if_range(self) -> Optional[datetime.datetime]: + """The value of If-Range HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_RANGE)) + + @reify + def keep_alive(self) -> bool: + """Is keepalive enabled by client?""" + return not self._message.should_close + + @reify + def cookies(self) -> Mapping[str, str]: + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, "") + parsed: SimpleCookie[str] = SimpleCookie(raw) + return MappingProxyType({key: val.value for key, val in parsed.items()}) + + @reify + def http_range(self) -> slice: + """The content of Range HTTP header. + + Return a slice instance. 
+ + """ + rng = self._headers.get(hdrs.RANGE) + start, end = None, None + if rng is not None: + try: + pattern = r"^bytes=(\d*)-(\d*)$" + start, end = re.findall(pattern, rng)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + + end = int(end) if end else None + start = int(start) if start else None + + if start is None and end is not None: + # end with no start is to return tail of content + start = -end + end = None + + if start is not None and end is not None: + # end is inclusive in range header, exclusive for slice + end += 1 + + if start >= end: + raise ValueError("start cannot be after end") + + if start is end is None: # No valid range supplied + raise ValueError("No start or end of range specified") + + return slice(start, end, 1) + + @reify + def content(self) -> StreamReader: + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + warnings.warn( + "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 + ) + return not self._payload.at_eof() + + @property + def can_read_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + return not self._payload.at_eof() + + @reify + def body_exists(self) -> bool: + """Return True if request has HTTP BODY, False otherwise.""" + return type(self._payload) is not EmptyStreamReader + + async def release(self) -> None: + """Release request. + + Eat unread part of HTTP BODY if present. + """ + while not self._payload.at_eof(): + await self._payload.readany() + + async def read(self) -> bytes: + """Read request body if present. + + Returns bytes object with full request content. 
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = await self._payload.readany() + body.extend(chunk) + if self._client_max_size: + body_size = len(body) + if body_size >= self._client_max_size: + raise HTTPRequestEntityTooLarge( + max_size=self._client_max_size, actual_size=body_size + ) + if not chunk: + break + self._read_bytes = bytes(body) + return self._read_bytes + + async def text(self) -> str: + """Return BODY as text using encoding from .charset.""" + bytes_body = await self.read() + encoding = self.charset or "utf-8" + return bytes_body.decode(encoding) + + async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: + """Return BODY as JSON.""" + body = await self.text() + return loads(body) + + async def multipart(self) -> MultipartReader: + """Return async iterator to process BODY as multipart.""" + return MultipartReader(self._headers, self._payload) + + async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": + """Return POST parameters.""" + if self._post is not None: + return self._post + if self._method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if content_type not in ( + "", + "application/x-www-form-urlencoded", + "multipart/form-data", + ): + self._post = MultiDictProxy(MultiDict()) + return self._post + + out: MultiDict[Union[str, bytes, FileField]] = MultiDict() + + if content_type == "multipart/form-data": + multipart = await self.multipart() + max_size = self._client_max_size + + field = await multipart.next() + while field is not None: + size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): + assert field.name is not None + + # Note that according to RFC 7578, the Content-Type header + # is optional, even for files, so we can't assume it's + # present. 
+ # https://tools.ietf.org/html/rfc7578#section-4.4 + if field.filename: + # store file in temp file + tmp = tempfile.TemporaryFile() + chunk = await field.read_chunk(size=2**16) + while chunk: + chunk = field.decode(chunk) + tmp.write(chunk) + size += len(chunk) + if 0 < max_size < size: + tmp.close() + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + chunk = await field.read_chunk(size=2**16) + tmp.seek(0) + + if field_ct is None: + field_ct = "application/octet-stream" + + ff = FileField( + field.name, + field.filename, + cast(io.BufferedReader, tmp), + field_ct, + field.headers, + ) + out.add(field.name, ff) + else: + # deal with ordinary data + value = await field.read(decode=True) + if field_ct is None or field_ct.startswith("text/"): + charset = field.get_charset(default="utf-8") + out.add(field.name, value.decode(charset)) + else: + out.add(field.name, value) + size += len(value) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + else: + raise ValueError( + "To decode nested multipart you need " "to use custom reader", + ) + + field = await multipart.next() + else: + data = await self.read() + if data: + charset = self.charset or "utf-8" + out.extend( + parse_qsl( + data.rstrip().decode(charset), + keep_blank_values=True, + encoding=charset, + ) + ) + + self._post = MultiDictProxy(out) + return self._post + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Extra info from protocol transport""" + protocol = self._protocol + if protocol is None: + return default + + transport = protocol.transport + if transport is None: + return default + + return transport.get_extra_info(name, default) + + def __repr__(self) -> str: + ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( + "ascii" + ) + return "<{} {} {} >".format( + self.__class__.__name__, self._method, ascii_encodable_path + ) + + def __eq__(self, other: object) -> bool: + return id(self) == id(other) + + def __bool__(self) -> bool: + return True + + async def _prepare_hook(self, response: StreamResponse) -> None: + return + + def _cancel(self, exc: BaseException) -> None: + self._payload.set_exception(exc) + + +class Request(BaseRequest): + + ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # matchdict, route_name, handler + # or information about traversal lookup + + # initialized after route resolving + self._match_info: Optional[UrlMappingMatchInfo] = None + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom {}.{} attribute " + "is discouraged".format(self.__class__.__name__, name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def clone( + self, + *, + method: str = sentinel, + rel_url: StrOrURL = sentinel, + headers: LooseHeaders = sentinel, + scheme: str = sentinel, + host: str = sentinel, + remote: str = sentinel, + ) -> "Request": + ret = super().clone( + method=method, + rel_url=rel_url, + headers=headers, + scheme=scheme, + host=host, + remote=remote, + ) + new_ret = cast(Request, ret) + new_ret._match_info = self._match_info + return new_ret + + @reify + def match_info(self) -> "UrlMappingMatchInfo": + """Result of route resolving.""" + match_info = self._match_info + assert match_info is not None + return match_info + + @property + def app(self) -> "Application": + 
"""Application instance.""" + match_info = self._match_info + assert match_info is not None + return match_info.current_app + + @property + def config_dict(self) -> ChainMapProxy: + match_info = self._match_info + assert match_info is not None + lst = match_info.apps + app = self.app + idx = lst.index(app) + sublist = list(reversed(lst[: idx + 1])) + return ChainMapProxy(sublist) + + async def _prepare_hook(self, response: StreamResponse) -> None: + match_info = self._match_info + if match_info is None: + return + for app in match_info._apps: + await app.on_response_prepare.send(self, response) diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_response.py b/venv/lib/python3.8/site-packages/aiohttp/web_response.py new file mode 100644 index 0000000..ce07f81 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_response.py @@ -0,0 +1,825 @@ +import asyncio +import collections.abc +import datetime +import enum +import json +import math +import time +import warnings +import zlib +from concurrent.futures import Executor +from http.cookies import Morsel, SimpleCookie +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Tuple, + Union, + cast, +) + +from multidict import CIMultiDict, istr + +from . import hdrs, payload +from .abc import AbstractStreamWriter +from .helpers import ( + ETAG_ANY, + PY_38, + QUOTED_ETAG_RE, + ETag, + HeadersMixin, + parse_http_date, + rfc822_formatted_time, + sentinel, + validate_etag_value, +) +from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11 +from .payload import Payload +from .typedefs import JSONEncoder, LooseHeaders + +__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") + + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest + + BaseClass = MutableMapping[str, Any] +else: + BaseClass = collections.abc.MutableMapping + + +if not PY_38: + # allow samesite to be used in python < 3.8 + # already permitted in python 3.8, see https://bugs.python.org/issue29613 + Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined] + + +class ContentCoding(enum.Enum): + # The content codings that we have support for. 
+ # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = "deflate" + gzip = "gzip" + identity = "identity" + + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(BaseClass, HeadersMixin): + + _length_check = True + + def __init__( + self, + *, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + self._body = None + self._keep_alive: Optional[bool] = None + self._chunked = False + self._compression = False + self._compression_force: Optional[ContentCoding] = None + self._cookies: SimpleCookie[str] = SimpleCookie() + + self._req: Optional[BaseRequest] = None + self._payload_writer: Optional[AbstractStreamWriter] = None + self._eof_sent = False + self._body_length = 0 + self._state: Dict[str, Any] = {} + + if headers is not None: + self._headers: CIMultiDict[str] = CIMultiDict(headers) + else: + self._headers = CIMultiDict() + + self.set_status(status, reason) + + @property + def prepared(self) -> bool: + return self._payload_writer is not None + + @property + def task(self) -> "Optional[asyncio.Task[None]]": + if self._req: + return self._req.task + else: + return None + + @property + def status(self) -> int: + return self._status + + @property + def chunked(self) -> bool: + return self._chunked + + @property + def compression(self) -> bool: + return self._compression + + @property + def reason(self) -> str: + return self._reason + + def set_status( + self, + status: int, + reason: Optional[str] = None, + _RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES, + ) -> None: + assert not self.prepared, ( + "Cannot change the response status code after " "the headers have been sent" + ) + self._status = int(status) + if reason is None: + try: + reason = _RESPONSES[self._status][0] + except Exception: + reason = "" + self._reason = reason + + @property + def keep_alive(self) -> Optional[bool]: + return self._keep_alive + + def force_close(self) -> None: + self._keep_alive = False + + @property + def body_length(self) -> int: + return self._body_length + + @property + def output_length(self) -> int: + warnings.warn("output_length is deprecated", DeprecationWarning) + assert self._payload_writer + return self._payload_writer.buffer_size + + def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: + """Enables automatic chunked transfer encoding.""" + self._chunked = True + + if hdrs.CONTENT_LENGTH in self._headers: + raise RuntimeError( + "You can't enable chunked encoding when " "a content length is set" + ) + if chunk_size is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + def enable_compression( + self, force: Optional[Union[bool, ContentCoding]] = None + ) -> None: + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. 
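+ # For example (illustrative): enable_compression(force=True) is treated
+ # as force=ContentCoding.deflate and force=False as
+ # force=ContentCoding.identity, each with a DeprecationWarning.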
+ if type(force) == bool: + force = ContentCoding.deflate if force else ContentCoding.identity + warnings.warn( + "Using boolean for force is deprecated #3318", DeprecationWarning + ) + elif force is not None: + assert isinstance(force, ContentCoding), ( + "force should one of " "None, bool or " "ContentEncoding" + ) + + self._compression = True + self._compression_force = force + + @property + def headers(self) -> "CIMultiDict[str]": + return self._headers + + @property + def cookies(self) -> "SimpleCookie[str]": + return self._cookies + + def set_cookie( + self, + name: str, + value: str, + *, + expires: Optional[str] = None, + domain: Optional[str] = None, + max_age: Optional[Union[int, str]] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + version: Optional[str] = None, + samesite: Optional[str] = None, + ) -> None: + """Set or update response cookie. + + Sets new cookie or updates existent with new value. + Also updates only those params which are not None. + """ + old = self._cookies.get(name) + if old is not None and old.coded_value == "": + # deleted cookie + self._cookies.pop(name, None) + + self._cookies[name] = value + c = self._cookies[name] + + if expires is not None: + c["expires"] = expires + elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": + del c["expires"] + + if domain is not None: + c["domain"] = domain + + if max_age is not None: + c["max-age"] = str(max_age) + elif "max-age" in c: + del c["max-age"] + + c["path"] = path + + if secure is not None: + c["secure"] = secure + if httponly is not None: + c["httponly"] = httponly + if version is not None: + c["version"] = version + if samesite is not None: + c["samesite"] = samesite + + def del_cookie( + self, name: str, *, domain: Optional[str] = None, path: str = "/" + ) -> None: + """Delete cookie. + + Creates new empty expired cookie. + """ + # TODO: do we need domain/path here? 
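+ # Hypothetical usage: a cookie set with set_cookie("session", "...",
+ # path="/app") should be deleted with del_cookie("session", path="/app")
+ # so the expiring cookie matches the original domain/path.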
+ self._cookies.pop(name, None) + self.set_cookie( + name, + "", + max_age=0, + expires="Thu, 01 Jan 1970 00:00:00 GMT", + domain=domain, + path=path, + ) + + @property + def content_length(self) -> Optional[int]: + # Just a placeholder for adding setter + return super().content_length + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + if value is not None: + value = int(value) + if self._chunked: + raise RuntimeError( + "You can't set content length when " "chunked encoding is enable" + ) + self._headers[hdrs.CONTENT_LENGTH] = str(value) + else: + self._headers.pop(hdrs.CONTENT_LENGTH, None) + + @property + def content_type(self) -> str: + # Just a placeholder for adding setter + return super().content_type + + @content_type.setter + def content_type(self, value: str) -> None: + self.content_type # read header values if needed + self._content_type = str(value) + self._generate_content_type_header() + + @property + def charset(self) -> Optional[str]: + # Just a placeholder for adding setter + return super().charset + + @charset.setter + def charset(self, value: Optional[str]) -> None: + ctype = self.content_type # read header values if needed + if ctype == "application/octet-stream": + raise RuntimeError( + "Setting charset for application/octet-stream " + "doesn't make sense, setup content_type first" + ) + assert self._content_dict is not None + if value is None: + self._content_dict.pop("charset", None) + else: + self._content_dict["charset"] = str(value).lower() + self._generate_content_type_header() + + @property + def last_modified(self) -> Optional[datetime.datetime]: + """The value of Last-Modified HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED)) + + @last_modified.setter + def last_modified( + self, value: Optional[Union[int, float, datetime.datetime, str]] + ) -> None: + if value is None: + self._headers.pop(hdrs.LAST_MODIFIED, None) + elif isinstance(value, (int, float)): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) + ) + elif isinstance(value, datetime.datetime): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() + ) + elif isinstance(value, str): + self._headers[hdrs.LAST_MODIFIED] = value + + @property + def etag(self) -> Optional[ETag]: + quoted_value = self._headers.get(hdrs.ETAG) + if not quoted_value: + return None + elif quoted_value == ETAG_ANY: + return ETag(value=ETAG_ANY) + match = QUOTED_ETAG_RE.fullmatch(quoted_value) + if not match: + return None + is_weak, value = match.group(1, 2) + return ETag( + is_weak=bool(is_weak), + value=value, + ) + + @etag.setter + def etag(self, value: Optional[Union[ETag, str]]) -> None: + if value is None: + self._headers.pop(hdrs.ETAG, None) + elif (isinstance(value, str) and value == ETAG_ANY) or ( + isinstance(value, ETag) and value.value == ETAG_ANY + ): + self._headers[hdrs.ETAG] = ETAG_ANY + elif isinstance(value, str): + validate_etag_value(value) + self._headers[hdrs.ETAG] = f'"{value}"' + elif isinstance(value, ETag) and isinstance(value.value, str): + validate_etag_value(value.value) + hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"' + self._headers[hdrs.ETAG] = hdr_value + else: + raise ValueError( + f"Unsupported etag type: {type(value)}. 
" + f"etag must be str, ETag or None" + ) + + def _generate_content_type_header( + self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE + ) -> None: + assert self._content_dict is not None + assert self._content_type is not None + params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) + if params: + ctype = self._content_type + "; " + params + else: + ctype = self._content_type + self._headers[CONTENT_TYPE] = ctype + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if coding != ContentCoding.identity: + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression(coding.value) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) + + async def _start_compression(self, request: "BaseRequest") -> None: + if self._compression_force: + await self._do_start_compression(self._compression_force) + else: + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for coding in ContentCoding: + if coding.value in accept_encoding: + await self._do_start_compression(coding) + return + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + if self._eof_sent: + return None + if self._payload_writer is not None: + return self._payload_writer + + return await self._start(request) + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + self._req = request + writer = self._payload_writer = request._payload_writer + + await self._prepare_headers() + await request._prepare_hook(self) + await self._write_headers() + + return writer + + async def _prepare_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + version = request.version + + headers = self._headers + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) + + if self._compression: + await self._start_compression(request) + + if self._chunked: + if version != HttpVersion11: + raise RuntimeError( + "Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format(request.version) + ) + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + elif self._length_check: + writer.length = self.content_length + if writer.length is None: + if version >= HttpVersion11 and self.status != 204: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + else: + keep_alive = False + # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 + # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 + elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204): + del headers[hdrs.CONTENT_LENGTH] + + if self.status not in (204, 304): + headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") + headers.setdefault(hdrs.DATE, rfc822_formatted_time()) + headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) + + # connection header + if hdrs.CONNECTION not in headers: + if keep_alive: + if version == HttpVersion10: + headers[hdrs.CONNECTION] = "keep-alive" + else: + if version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" + + async def 
_write_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + # status line + version = request.version + status_line = "HTTP/{}.{} {} {}".format( + version[0], version[1], self._status, self._reason + ) + await writer.write_headers(status_line, self._headers) + + async def write(self, data: bytes) -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._payload_writer is None: + raise RuntimeError("Cannot call write() before prepare()") + + await self._payload_writer.write(data) + + async def drain(self) -> None: + assert not self._eof_sent, "EOF has already been sent" + assert self._payload_writer is not None, "Response has not been started" + warnings.warn( + "drain method is deprecated, use await resp.write()", + DeprecationWarning, + stacklevel=2, + ) + await self._payload_writer.drain() + + async def write_eof(self, data: bytes = b"") -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + return + + assert self._payload_writer is not None, "Response has not been started" + + await self._payload_writer.write_eof(data) + self._eof_sent = True + self._req = None + self._body_length = self._payload_writer.output_size + self._payload_writer = None + + def __repr__(self) -> str: + if self._eof_sent: + info = "eof" + elif self.prepared: + assert self._req is not None + info = f"{self._req.method} {self._req.path} " + else: + info = "not prepared" + return f"<{self.__class__.__name__} {self.reason} {info}>" + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + def __hash__(self) -> int: + return hash(id(self)) + + def __eq__(self, other: object) -> bool: + return self is other + + +class Response(StreamResponse): + def __init__( + self, + *, + body: Any = None, + status: int = 200, + reason: Optional[str] = None, + text: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: Optional[str] = None, + charset: Optional[str] = None, + zlib_executor_size: Optional[int] = None, + zlib_executor: Optional[Executor] = None, + ) -> None: + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + elif not isinstance(headers, CIMultiDict): + real_headers = CIMultiDict(headers) + else: + real_headers = headers # = cast('CIMultiDict[str]', headers) + + if content_type is not None and "charset" in content_type: + raise ValueError("charset must not be in content_type " "argument") + + if text is not None: + if hdrs.CONTENT_TYPE in real_headers: + if content_type or charset: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % type(text)) + if content_type is None: + content_type = "text/plain" + if charset is None: + charset = "utf-8" + 
real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset + body = text.encode(charset) + text = None + else: + if hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + if content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, headers=real_headers) + + if text is not None: + self.text = text + else: + self.body = body + + self._compressed_body: Optional[bytes] = None + self._zlib_executor_size = zlib_executor_size + self._zlib_executor = zlib_executor + + @property + def body(self) -> Optional[Union[bytes, Payload]]: + return self._body + + @body.setter + def body( + self, + body: bytes, + CONTENT_TYPE: istr = hdrs.CONTENT_TYPE, + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + ) -> None: + if body is None: + self._body: Optional[bytes] = None + self._body_payload: bool = False + elif isinstance(body, (bytes, bytearray)): + self._body = body + self._body_payload = False + else: + try: + self._body = body = payload.PAYLOAD_REGISTRY.get(body) + except payload.LookupError: + raise ValueError("Unsupported body type %r" % type(body)) + + self._body_payload = True + + headers = self._headers + + # set content-length header if needed + if not self._chunked and CONTENT_LENGTH not in headers: + size = body.size + if size is not None: + headers[CONTENT_LENGTH] = str(size) + + # set content-type + if CONTENT_TYPE not in headers: + headers[CONTENT_TYPE] = body.content_type + + # copy payload headers + if body.headers: + for (key, value) in body.headers.items(): + if key not in headers: + headers[key] = value + + self._compressed_body = None + + @property + def text(self) -> Optional[str]: + if self._body is None: + return None + return self._body.decode(self.charset or "utf-8") + + @text.setter + def text(self, text: str) -> None: + assert text is None or isinstance( + text, str + ), "text argument must be str (%r)" % type(text) + + if self.content_type == "application/octet-stream": + self.content_type = "text/plain" + if self.charset is None: + self.charset = "utf-8" + + self._body = text.encode(self.charset) + self._body_payload = False + self._compressed_body = None + + @property + def content_length(self) -> Optional[int]: + if self._chunked: + return None + + if hdrs.CONTENT_LENGTH in self._headers: + return super().content_length + + if self._compressed_body is not None: + # Return length of the compressed body + return len(self._compressed_body) + elif self._body_payload: + # A payload without content length, or a compressed payload + return None + elif self._body is not None: + return len(self._body) + else: + return 0 + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + raise RuntimeError("Content length is set automatically") + + async def write_eof(self, data: bytes = b"") -> None: + if self._eof_sent: + return + if self._compressed_body is None: + body: Optional[Union[bytes, Payload]] = self._body + else: + body = self._compressed_body + assert not data, f"data arg is not supported, got {data!r}" + assert self._req is not None + assert self._payload_writer is not None + if body is not None: + if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]: + await super().write_eof() + elif self._body_payload: + payload = cast(Payload, body) 
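+ # Payload-backed bodies (objects resolved through PAYLOAD_REGISTRY in
+ # the body setter) write themselves to the stream writer.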
+ await payload.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) + else: + await super().write_eof() + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + if not self._body_payload: + if self._body is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body)) + else: + self._headers[hdrs.CONTENT_LENGTH] = "0" + + return await super()._start(request) + + def _compress_body(self, zlib_mode: int) -> None: + assert zlib_mode > 0 + compressobj = zlib.compressobj(wbits=zlib_mode) + body_in = self._body + assert body_in is not None + self._compressed_body = compressobj.compress(body_in) + compressobj.flush() + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if self._body_payload or self._chunked: + return await super()._do_start_compression(coding) + + if coding != ContentCoding.identity: + # Instead of using _payload_writer.enable_compression, + # compress the whole body + zlib_mode = ( + 16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS + ) + body_in = self._body + assert body_in is not None + if ( + self._zlib_executor_size is not None + and len(body_in) > self._zlib_executor_size + ): + await asyncio.get_event_loop().run_in_executor( + self._zlib_executor, self._compress_body, zlib_mode + ) + else: + self._compress_body(zlib_mode) + + body_out = self._compressed_body + assert body_out is not None + + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out)) + + +def json_response( + data: Any = sentinel, + *, + text: Optional[str] = None, + body: Optional[bytes] = None, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, +) -> Response: + if data is not sentinel: + if text or body: + raise ValueError("only one of data, text, or body should be specified") + else: + text = dumps(data) + return Response( + text=text, + body=body, + status=status, + reason=reason, + headers=headers, + content_type=content_type, + ) diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_routedef.py b/venv/lib/python3.8/site-packages/aiohttp/web_routedef.py new file mode 100644 index 0000000..a1eb0a7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_routedef.py @@ -0,0 +1,216 @@ +import abc +import os # noqa +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Type, + Union, + overload, +) + +import attr + +from . 
import hdrs +from .abc import AbstractView +from .typedefs import Handler, PathLike + +if TYPE_CHECKING: # pragma: no cover + from .web_request import Request + from .web_response import StreamResponse + from .web_urldispatcher import AbstractRoute, UrlDispatcher +else: + Request = StreamResponse = UrlDispatcher = AbstractRoute = None + + +__all__ = ( + "AbstractRouteDef", + "RouteDef", + "StaticDef", + "RouteTableDef", + "head", + "options", + "get", + "post", + "patch", + "put", + "delete", + "route", + "view", + "static", +) + + +class AbstractRouteDef(abc.ABC): + @abc.abstractmethod + def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + pass # pragma: no cover + + +_HandlerType = Union[Type[AbstractView], Handler] + + +@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) +class RouteDef(AbstractRouteDef): + method: str + path: str + handler: _HandlerType + kwargs: Dict[str, Any] + + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(f", {name}={value!r}") + return " {handler.__name__!r}" "{info}>".format( + method=self.method, path=self.path, handler=self.handler, info="".join(info) + ) + + def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + if self.method in hdrs.METH_ALL: + reg = getattr(router, "add_" + self.method.lower()) + return [reg(self.path, self.handler, **self.kwargs)] + else: + return [ + router.add_route(self.method, self.path, self.handler, **self.kwargs) + ] + + +@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) +class StaticDef(AbstractRouteDef): + prefix: str + path: PathLike + kwargs: Dict[str, Any] + + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(f", {name}={value!r}") + return " {path}" "{info}>".format( + prefix=self.prefix, path=self.path, info="".join(info) + ) + + def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + resource = router.add_static(self.prefix, self.path, **self.kwargs) + routes = resource.get_info().get("routes", {}) + return list(routes.values()) + + +def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return RouteDef(method, path, handler, kwargs) + + +def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_HEAD, path, handler, **kwargs) + + +def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + +def get( + path: str, + handler: _HandlerType, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, +) -> RouteDef: + return route( + hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs + ) + + +def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_POST, path, handler, **kwargs) + + +def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_PUT, path, handler, **kwargs) + + +def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_PATCH, path, handler, **kwargs) + + +def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_DELETE, path, handler, **kwargs) + + +def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef: + return route(hdrs.METH_ANY, path, handler, **kwargs) + + +def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef: + return StaticDef(prefix, path, kwargs) + + +_Deco = 
Callable[[_HandlerType], _HandlerType] + + +class RouteTableDef(Sequence[AbstractRouteDef]): + """Route definition table""" + + def __init__(self) -> None: + self._items: List[AbstractRouteDef] = [] + + def __repr__(self) -> str: + return f"" + + @overload + def __getitem__(self, index: int) -> AbstractRouteDef: + ... + + @overload + def __getitem__(self, index: slice) -> List[AbstractRouteDef]: + ... + + def __getitem__(self, index): # type: ignore[no-untyped-def] + return self._items[index] + + def __iter__(self) -> Iterator[AbstractRouteDef]: + return iter(self._items) + + def __len__(self) -> int: + return len(self._items) + + def __contains__(self, item: object) -> bool: + return item in self._items + + def route(self, method: str, path: str, **kwargs: Any) -> _Deco: + def inner(handler: _HandlerType) -> _HandlerType: + self._items.append(RouteDef(method, path, handler, kwargs)) + return handler + + return inner + + def head(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_HEAD, path, **kwargs) + + def get(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_GET, path, **kwargs) + + def post(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_POST, path, **kwargs) + + def put(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_PUT, path, **kwargs) + + def patch(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_PATCH, path, **kwargs) + + def delete(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_DELETE, path, **kwargs) + + def options(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_OPTIONS, path, **kwargs) + + def view(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_ANY, path, **kwargs) + + def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None: + self._items.append(StaticDef(prefix, path, kwargs)) diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_runner.py b/venv/lib/python3.8/site-packages/aiohttp/web_runner.py new file mode 100644 index 0000000..9282bb9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_runner.py @@ -0,0 +1,381 @@ +import asyncio +import signal +import socket +from abc import ABC, abstractmethod +from typing import Any, List, Optional, Set + +from yarl import URL + +from .web_app import Application +from .web_server import Server + +try: + from ssl import SSLContext +except ImportError: + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ( + "BaseSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + "SockSite", + "BaseRunner", + "AppRunner", + "ServerRunner", + "GracefulExit", +) + + +class GracefulExit(SystemExit): + code = 1 + + +def _raise_graceful_exit() -> None: + raise GracefulExit() + + +class BaseSite(ABC): + __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server") + + def __init__( + self, + runner: "BaseRunner", + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + if runner.server is None: + raise RuntimeError("Call runner.setup() before making a site") + self._runner = runner + self._shutdown_timeout = shutdown_timeout + self._ssl_context = ssl_context + self._backlog = backlog + self._server: Optional[asyncio.AbstractServer] = None + + @property + @abstractmethod + def name(self) -> str: + pass # pragma: no cover + + @abstractmethod + async def start(self) -> None: + self._runner._reg_site(self) + + async def stop(self) -> None: + 
self._runner._check_site(self) + if self._server is None: + self._runner._unreg_site(self) + return # not started yet + self._server.close() + # named pipes do not have wait_closed property + if hasattr(self._server, "wait_closed"): + await self._server.wait_closed() + await self._runner.shutdown() + assert self._runner.server + await self._runner.server.shutdown(self._shutdown_timeout) + self._runner._unreg_site(self) + + +class TCPSite(BaseSite): + __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port") + + def __init__( + self, + runner: "BaseRunner", + host: Optional[str] = None, + port: Optional[int] = None, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + ) -> None: + super().__init__( + runner, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._host = host + if port is None: + port = 8443 if self._ssl_context else 8080 + self._port = port + self._reuse_address = reuse_address + self._reuse_port = reuse_port + + @property + def name(self) -> str: + scheme = "https" if self._ssl_context else "http" + host = "0.0.0.0" if self._host is None else self._host + return str(URL.build(scheme=scheme, host=host, port=self._port)) + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, + self._host, + self._port, + ssl=self._ssl_context, + backlog=self._backlog, + reuse_address=self._reuse_address, + reuse_port=self._reuse_port, + ) + + +class UnixSite(BaseSite): + __slots__ = ("_path",) + + def __init__( + self, + runner: "BaseRunner", + path: str, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + super().__init__( + runner, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._path = path + + @property + def name(self) -> str: + scheme = "https" if self._ssl_context else "http" + return f"{scheme}://unix:{self._path}:" + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_unix_server( + server, self._path, ssl=self._ssl_context, backlog=self._backlog + ) + + +class NamedPipeSite(BaseSite): + __slots__ = ("_path",) + + def __init__( + self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 + ) -> None: + loop = asyncio.get_event_loop() + if not isinstance( + loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor" "loop under windows" + ) + super().__init__(runner, shutdown_timeout=shutdown_timeout) + self._path = path + + @property + def name(self) -> str: + return self._path + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + _server = await loop.start_serving_pipe( # type: ignore[attr-defined] + server, self._path + ) + self._server = _server[0] + + +class SockSite(BaseSite): + __slots__ = ("_sock", "_name") + + def __init__( + self, + runner: "BaseRunner", + sock: socket.socket, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + super().__init__( + runner, + 
shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._sock = sock + scheme = "https" if self._ssl_context else "http" + if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: + name = f"{scheme}://unix:{sock.getsockname()}:" + else: + host, port = sock.getsockname()[:2] + name = str(URL.build(scheme=scheme, host=host, port=port)) + self._name = name + + @property + def name(self) -> str: + return self._name + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog + ) + + +class BaseRunner(ABC): + __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites") + + def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None: + self._handle_signals = handle_signals + self._kwargs = kwargs + self._server: Optional[Server] = None + self._sites: List[BaseSite] = [] + + @property + def server(self) -> Optional[Server]: + return self._server + + @property + def addresses(self) -> List[Any]: + ret: List[Any] = [] + for site in self._sites: + server = site._server + if server is not None: + sockets = server.sockets + if sockets is not None: + for sock in sockets: + ret.append(sock.getsockname()) + return ret + + @property + def sites(self) -> Set[BaseSite]: + return set(self._sites) + + async def setup(self) -> None: + loop = asyncio.get_event_loop() + + if self._handle_signals: + try: + loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) + loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) + except NotImplementedError: # pragma: no cover + # add_signal_handler is not implemented on Windows + pass + + self._server = await self._make_server() + + @abstractmethod + async def shutdown(self) -> None: + pass # pragma: no cover + + async def cleanup(self) -> None: + loop = asyncio.get_event_loop() + + # The loop over sites is intentional, an exception on gather() + # leaves self._sites in unpredictable state. 
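+ # (site.stop() also unregisters the site from self._sites, hence the
+ # list() copy while iterating below.)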
+ # The loop guaranties that a site is either deleted on success or + # still present on failure + for site in list(self._sites): + await site.stop() + await self._cleanup_server() + self._server = None + if self._handle_signals: + try: + loop.remove_signal_handler(signal.SIGINT) + loop.remove_signal_handler(signal.SIGTERM) + except NotImplementedError: # pragma: no cover + # remove_signal_handler is not implemented on Windows + pass + + @abstractmethod + async def _make_server(self) -> Server: + pass # pragma: no cover + + @abstractmethod + async def _cleanup_server(self) -> None: + pass # pragma: no cover + + def _reg_site(self, site: BaseSite) -> None: + if site in self._sites: + raise RuntimeError(f"Site {site} is already registered in runner {self}") + self._sites.append(site) + + def _check_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError(f"Site {site} is not registered in runner {self}") + + def _unreg_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError(f"Site {site} is not registered in runner {self}") + self._sites.remove(site) + + +class ServerRunner(BaseRunner): + """Low-level web server runner""" + + __slots__ = ("_web_server",) + + def __init__( + self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any + ) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + self._web_server = web_server + + async def shutdown(self) -> None: + pass + + async def _make_server(self) -> Server: + return self._web_server + + async def _cleanup_server(self) -> None: + pass + + +class AppRunner(BaseRunner): + """Web Application runner""" + + __slots__ = ("_app",) + + def __init__( + self, app: Application, *, handle_signals: bool = False, **kwargs: Any + ) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + if not isinstance(app, Application): + raise TypeError( + "The first argument should be web.Application " + "instance, got {!r}".format(app) + ) + self._app = app + + @property + def app(self) -> Application: + return self._app + + async def shutdown(self) -> None: + await self._app.shutdown() + + async def _make_server(self) -> Server: + loop = asyncio.get_event_loop() + self._app._set_loop(loop) + self._app.on_startup.freeze() + await self._app.startup() + self._app.freeze() + + return self._app._make_handler(loop=loop, **self._kwargs) + + async def _cleanup_server(self) -> None: + await self._app.cleanup() diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_server.py b/venv/lib/python3.8/site-packages/aiohttp/web_server.py new file mode 100644 index 0000000..fa46e90 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_server.py @@ -0,0 +1,62 @@ +"""Low level HTTP server.""" +import asyncio +from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa + +from .abc import AbstractStreamWriter +from .helpers import get_running_loop +from .http_parser import RawRequestMessage +from .streams import StreamReader +from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler +from .web_request import BaseRequest + +__all__ = ("Server",) + + +class Server: + def __init__( + self, + handler: _RequestHandler, + *, + request_factory: Optional[_RequestFactory] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any + ) -> None: + self._loop = get_running_loop(loop) + self._connections: Dict[RequestHandler, asyncio.Transport] = {} + self._kwargs = kwargs + self.requests_count = 0 + self.request_handler = handler + 
self.request_factory = request_factory or self._make_request + + @property + def connections(self) -> List[RequestHandler]: + return list(self._connections.keys()) + + def connection_made( + self, handler: RequestHandler, transport: asyncio.Transport + ) -> None: + self._connections[handler] = transport + + def connection_lost( + self, handler: RequestHandler, exc: Optional[BaseException] = None + ) -> None: + if handler in self._connections: + del self._connections[handler] + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + ) -> BaseRequest: + return BaseRequest(message, payload, protocol, writer, task, self._loop) + + async def shutdown(self, timeout: Optional[float] = None) -> None: + coros = [conn.shutdown(timeout) for conn in self._connections] + await asyncio.gather(*coros) + self._connections.clear() + + def __call__(self) -> RequestHandler: + return RequestHandler(self, loop=self._loop, **self._kwargs) diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py b/venv/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py new file mode 100644 index 0000000..5942e35 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py @@ -0,0 +1,1220 @@ +import abc +import asyncio +import base64 +import hashlib +import inspect +import keyword +import os +import re +import warnings +from contextlib import contextmanager +from functools import wraps +from pathlib import Path +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Container, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Set, + Sized, + Tuple, + Type, + Union, + cast, +) + +from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] + +from . 
import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .helpers import DEBUG +from .http import HttpVersion11 +from .typedefs import Final, Handler, PathLike, TypedDict +from .web_exceptions import ( + HTTPException, + HTTPExpectationFailed, + HTTPForbidden, + HTTPMethodNotAllowed, + HTTPNotFound, +) +from .web_fileresponse import FileResponse +from .web_request import Request +from .web_response import Response, StreamResponse +from .web_routedef import AbstractRouteDef + +__all__ = ( + "UrlDispatcher", + "UrlMappingMatchInfo", + "AbstractResource", + "Resource", + "PlainResource", + "DynamicResource", + "AbstractRoute", + "ResourceRoute", + "StaticResource", + "View", +) + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + + BaseDict = Dict[str, str] +else: + BaseDict = dict + +YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) + +HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( + r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" +) +ROUTE_RE: Final[Pattern[str]] = re.compile( + r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" +) +PATH_SEP: Final[str] = re.escape("/") + + +_ExpectHandler = Callable[[Request], Awaitable[None]] +_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] + + +class _InfoDict(TypedDict, total=False): + path: str + + formatter: str + pattern: Pattern[str] + + directory: Path + prefix: str + routes: Mapping[str, "AbstractRoute"] + + app: "Application" + + domain: str + + rule: "AbstractRuleMatching" + + http_exception: HTTPException + + +class AbstractResource(Sized, Iterable["AbstractRoute"]): + def __init__(self, *, name: Optional[str] = None) -> None: + self._name = name + + @property + def name(self) -> Optional[str]: + return self._name + + @property + @abc.abstractmethod + def canonical(self) -> str: + """Exposes the resource's canonical path. + + For example '/foo/bar/{name}' + + """ + + @abc.abstractmethod # pragma: no branch + def url_for(self, **kwargs: str) -> URL: + """Construct url for resource with additional params.""" + + @abc.abstractmethod # pragma: no branch + async def resolve(self, request: Request) -> _Resolve: + """Resolve resource. + + Return (UrlMappingMatchInfo, allowed_methods) pair. + """ + + @abc.abstractmethod + def add_prefix(self, prefix: str) -> None: + """Add a prefix to processed URLs. + + Required for subapplications support. 
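+
+ For example (illustrative), nesting a sub-application under '/admin'
+ turns a resource for '/users' into one matching '/admin/users'.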
+ """ + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + def freeze(self) -> None: + pass + + @abc.abstractmethod + def raw_match(self, path: str) -> bool: + """Perform a raw match against path""" + + +class AbstractRoute(abc.ABC): + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + *, + expect_handler: Optional[_ExpectHandler] = None, + resource: Optional[AbstractResource] = None, + ) -> None: + + if expect_handler is None: + expect_handler = _default_expect_handler + + assert asyncio.iscoroutinefunction( + expect_handler + ), f"Coroutine is expected, got {expect_handler!r}" + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError(f"{method} is not allowed HTTP method") + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn( + "Bare generators are deprecated, " "use @coroutine wrapper", + DeprecationWarning, + ) + elif isinstance(handler, type) and issubclass(handler, AbstractView): + pass + else: + warnings.warn( + "Bare functions are deprecated, " "use async ones", DeprecationWarning + ) + + @wraps(handler) + async def handler_wrapper(request: Request) -> StreamResponse: + result = old_handler(request) + if asyncio.iscoroutine(result): + return await result + return result # type: ignore[return-value] + + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self) -> str: + return self._method + + @property + def handler(self) -> Handler: + return self._handler + + @property + @abc.abstractmethod + def name(self) -> Optional[str]: + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self) -> Optional[AbstractResource]: + return self._resource + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + + async def handle_expect_header(self, request: Request) -> None: + await self._expect_handler(request) + + +class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): + super().__init__(match_dict) + self._route = route + self._apps: List[Application] = [] + self._current_app: Optional[Application] = None + self._frozen = False + + @property + def handler(self) -> Handler: + return self._route.handler + + @property + def route(self) -> AbstractRoute: + return self._route + + @property + def expect_handler(self) -> _ExpectHandler: + return self._route.handle_expect_header + + @property + def http_exception(self) -> Optional[HTTPException]: + return None + + def get_info(self) -> _InfoDict: # type: ignore[override] + return self._route.get_info() + + @property + def apps(self) -> Tuple["Application", ...]: + return tuple(self._apps) + + def add_app(self, app: "Application") -> None: + if self._frozen: + raise RuntimeError("Cannot change apps stack after .freeze() call") + if self._current_app is None: + self._current_app = app + self._apps.insert(0, app) + + @property + def current_app(self) -> "Application": + app = self._current_app + assert app is not None + return app + + 
@contextmanager + def set_current_app(self, app: "Application") -> Generator[None, None, None]: + if DEBUG: # pragma: no cover + if app not in self._apps: + raise RuntimeError( + "Expected one of the following apps {!r}, got {!r}".format( + self._apps, app + ) + ) + prev = self._current_app + self._current_app = app + try: + yield + finally: + self._current_app = prev + + def freeze(self) -> None: + self._frozen = True + + def __repr__(self) -> str: + return f"" + + +class MatchInfoError(UrlMappingMatchInfo): + def __init__(self, http_exception: HTTPException) -> None: + self._exception = http_exception + super().__init__({}, SystemRoute(self._exception)) + + @property + def http_exception(self) -> HTTPException: + return self._exception + + def __repr__(self) -> str: + return "".format( + self._exception.status, self._exception.reason + ) + + +async def _default_expect_handler(request: Request) -> None: + """Default handler for Expect header. + + Just send "100 Continue" to client. + raise HTTPExpectationFailed if value of header is not "100-continue" + """ + expect = request.headers.get(hdrs.EXPECT, "") + if request.version == HttpVersion11: + if expect.lower() == "100-continue": + await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + else: + raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) + + +class Resource(AbstractResource): + def __init__(self, *, name: Optional[str] = None) -> None: + super().__init__(name=name) + self._routes: List[ResourceRoute] = [] + + def add_route( + self, + method: str, + handler: Union[Type[AbstractView], Handler], + *, + expect_handler: Optional[_ExpectHandler] = None, + ) -> "ResourceRoute": + + for route_obj in self._routes: + if route_obj.method == method or route_obj.method == hdrs.METH_ANY: + raise RuntimeError( + "Added route will never be executed, " + "method {route.method} is already " + "registered".format(route=route_obj) + ) + + route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) + self.register_route(route_obj) + return route_obj + + def register_route(self, route: "ResourceRoute") -> None: + assert isinstance( + route, ResourceRoute + ), f"Instance of Route class is required, got {route!r}" + self._routes.append(route) + + async def resolve(self, request: Request) -> _Resolve: + allowed_methods: Set[str] = set() + + match_dict = self._match(request.rel_url.raw_path) + if match_dict is None: + return None, allowed_methods + + for route_obj in self._routes: + route_method = route_obj.method + allowed_methods.add(route_method) + + if route_method == request.method or route_method == hdrs.METH_ANY: + return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) + else: + return None, allowed_methods + + @abc.abstractmethod + def _match(self, path: str) -> Optional[Dict[str, str]]: + pass # pragma: no cover + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes) + + # TODO: implement all abstract methods + + +class PlainResource(Resource): + def __init__(self, path: str, *, name: Optional[str] = None) -> None: + super().__init__(name=name) + assert not path or path.startswith("/") + self._path = path + + @property + def canonical(self) -> str: + return self._path + + def freeze(self) -> None: + if not self._path: + self._path = "/" + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith("/") + assert not prefix.endswith("/") + assert len(prefix) > 1 + self._path = prefix + self._path + + def 
_match(self, path: str) -> Optional[Dict[str, str]]: + # string comparison is about 10 times faster than regexp matching + if self._path == path: + return {} + else: + return None + + def raw_match(self, path: str) -> bool: + return self._path == path + + def get_info(self) -> _InfoDict: + return {"path": self._path} + + def url_for(self) -> URL: # type: ignore[override] + return URL.build(path=self._path, encoded=True) + + def __repr__(self) -> str: + name = "'" + self.name + "' " if self.name is not None else "" + return f"" + + +class DynamicResource(Resource): + + DYN = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*)\}") + DYN_WITH_RE = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}") + GOOD = r"[^{}/]+" + + def __init__(self, path: str, *, name: Optional[str] = None) -> None: + super().__init__(name=name) + pattern = "" + formatter = "" + for part in ROUTE_RE.split(path): + match = self.DYN.fullmatch(part) + if match: + pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD) + formatter += "{" + match.group("var") + "}" + continue + + match = self.DYN_WITH_RE.fullmatch(part) + if match: + pattern += "(?P<{var}>{re})".format(**match.groupdict()) + formatter += "{" + match.group("var") + "}" + continue + + if "{" in part or "}" in part: + raise ValueError(f"Invalid path '{path}'['{part}']") + + part = _requote_path(part) + formatter += part + pattern += re.escape(part) + + try: + compiled = re.compile(pattern) + except re.error as exc: + raise ValueError(f"Bad pattern '{pattern}': {exc}") from None + assert compiled.pattern.startswith(PATH_SEP) + assert formatter.startswith("/") + self._pattern = compiled + self._formatter = formatter + + @property + def canonical(self) -> str: + return self._formatter + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith("/") + assert not prefix.endswith("/") + assert len(prefix) > 1 + self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern) + self._formatter = prefix + self._formatter + + def _match(self, path: str) -> Optional[Dict[str, str]]: + match = self._pattern.fullmatch(path) + if match is None: + return None + else: + return { + key: _unquote_path(value) for key, value in match.groupdict().items() + } + + def raw_match(self, path: str) -> bool: + return self._formatter == path + + def get_info(self) -> _InfoDict: + return {"formatter": self._formatter, "pattern": self._pattern} + + def url_for(self, **parts: str) -> URL: + url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()}) + return URL.build(path=url, encoded=True) + + def __repr__(self) -> str: + name = "'" + self.name + "' " if self.name is not None else "" + return "".format( + name=name, formatter=self._formatter + ) + + +class PrefixResource(AbstractResource): + def __init__(self, prefix: str, *, name: Optional[str] = None) -> None: + assert not prefix or prefix.startswith("/"), prefix + assert prefix in ("", "/") or not prefix.endswith("/"), prefix + super().__init__(name=name) + self._prefix = _requote_path(prefix) + self._prefix2 = self._prefix + "/" + + @property + def canonical(self) -> str: + return self._prefix + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith("/") + assert not prefix.endswith("/") + assert len(prefix) > 1 + self._prefix = prefix + self._prefix + self._prefix2 = self._prefix + "/" + + def raw_match(self, prefix: str) -> bool: + return False + + # TODO: impl missing abstract methods + + +class StaticResource(PrefixResource): + VERSION_KEY = "v" + + def __init__( + self, + 
prefix: str, + directory: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> None: + super().__init__(prefix, name=name) + try: + directory = Path(directory) + if str(directory).startswith("~"): + directory = Path(os.path.expanduser(str(directory))) + directory = directory.resolve() + if not directory.is_dir(): + raise ValueError("Not a directory") + except (FileNotFoundError, ValueError) as error: + raise ValueError(f"No directory exists at '{directory}'") from error + self._directory = directory + self._show_index = show_index + self._chunk_size = chunk_size + self._follow_symlinks = follow_symlinks + self._expect_handler = expect_handler + self._append_version = append_version + + self._routes = { + "GET": ResourceRoute( + "GET", self._handle, self, expect_handler=expect_handler + ), + "HEAD": ResourceRoute( + "HEAD", self._handle, self, expect_handler=expect_handler + ), + } + + def url_for( # type: ignore[override] + self, + *, + filename: Union[str, Path], + append_version: Optional[bool] = None, + ) -> URL: + if append_version is None: + append_version = self._append_version + if isinstance(filename, Path): + filename = str(filename) + filename = filename.lstrip("/") + + url = URL.build(path=self._prefix, encoded=True) + # filename is not encoded + if YARL_VERSION < (1, 6): + url = url / filename.replace("%", "%25") + else: + url = url / filename + + if append_version: + try: + filepath = self._directory.joinpath(filename).resolve() + if not self._follow_symlinks: + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink + # with follow_symlinks is False + return url # relatively safe + if filepath.is_file(): + # TODO cache file content + # with file watcher for cache invalidation + with filepath.open("rb") as f: + file_bytes = f.read() + h = self._get_file_hash(file_bytes) + url = url.with_query({self.VERSION_KEY: h}) + return url + return url + + @staticmethod + def _get_file_hash(byte_array: bytes) -> str: + m = hashlib.sha256() # todo sha256 can be configurable param + m.update(byte_array) + b64 = base64.urlsafe_b64encode(m.digest()) + return b64.decode("ascii") + + def get_info(self) -> _InfoDict: + return { + "directory": self._directory, + "prefix": self._prefix, + "routes": self._routes, + } + + def set_options_route(self, handler: Handler) -> None: + if "OPTIONS" in self._routes: + raise RuntimeError("OPTIONS route was set already") + self._routes["OPTIONS"] = ResourceRoute( + "OPTIONS", handler, self, expect_handler=self._expect_handler + ) + + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.raw_path + method = request.method + allowed_methods = set(self._routes) + if not path.startswith(self._prefix2) and path != self._prefix: + return None, set() + + if method not in allowed_methods: + return None, allowed_methods + + match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: + rel_url = request.match_info["filename"] + try: + filename = Path(rel_url) + if filename.anchor: + # rel_url is an 
absolute name like + # /static/\\machine_name\c$ or /static/D:\path + # where the static dir is totally different + raise HTTPForbidden() + filepath = self._directory.joinpath(filename).resolve() + if not self._follow_symlinks: + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError) as error: + # relatively safe + raise HTTPNotFound() from error + except HTTPForbidden: + raise + except Exception as error: + # perm error or other kind! + request.app.logger.exception(error) + raise HTTPNotFound() from error + + # on opening a dir, load its contents if allowed + if filepath.is_dir(): + if self._show_index: + try: + return Response( + text=self._directory_as_html(filepath), content_type="text/html" + ) + except PermissionError: + raise HTTPForbidden() + else: + raise HTTPForbidden() + elif filepath.is_file(): + return FileResponse(filepath, chunk_size=self._chunk_size) + else: + raise HTTPNotFound + + def _directory_as_html(self, filepath: Path) -> str: + # returns directory's index as html + + # sanity check + assert filepath.is_dir() + + relative_path_to_dir = filepath.relative_to(self._directory).as_posix() + index_of = f"Index of /{relative_path_to_dir}" + h1 = f"

<h1>{index_of}</h1>" + + index_list = [] + dir_index = filepath.iterdir() + for _file in sorted(dir_index): + # show file url as relative to static path + rel_path = _file.relative_to(self._directory).as_posix() + file_url = self._prefix + "/" + rel_path + + # if file is a directory, add '/' to the end of the name + if _file.is_dir(): + file_name = f"{_file.name}/" + else: + file_name = _file.name + + index_list.append( + '<li><a href="{url}">{name}</a></li>'.format( + url=file_url, name=file_name + ) + ) + ul = "<ul>\n{}\n</ul>
    ".format("\n".join(index_list)) + body = f"\n{h1}\n{ul}\n" + + head_str = f"\n{index_of}\n" + html = f"\n{head_str}\n{body}\n" + + return html + + def __repr__(self) -> str: + name = "'" + self.name + "'" if self.name is not None else "" + return " {directory!r}>".format( + name=name, path=self._prefix, directory=self._directory + ) + + +class PrefixedSubAppResource(PrefixResource): + def __init__(self, prefix: str, app: "Application") -> None: + super().__init__(prefix) + self._app = app + for resource in app.router.resources(): + resource.add_prefix(prefix) + + def add_prefix(self, prefix: str) -> None: + super().add_prefix(prefix) + for resource in self._app.router.resources(): + resource.add_prefix(prefix) + + def url_for(self, *args: str, **kwargs: str) -> URL: + raise RuntimeError(".url_for() is not supported " "by sub-application root") + + def get_info(self) -> _InfoDict: + return {"app": self._app, "prefix": self._prefix} + + async def resolve(self, request: Request) -> _Resolve: + if ( + not request.url.raw_path.startswith(self._prefix2) + and request.url.raw_path != self._prefix + ): + return None, set() + match_info = await self._app.router.resolve(request) + match_info.add_app(self._app) + if isinstance(match_info.http_exception, HTTPMethodNotAllowed): + methods = match_info.http_exception.allowed_methods + else: + methods = set() + return match_info, methods + + def __len__(self) -> int: + return len(self._app.router.routes()) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._app.router.routes()) + + def __repr__(self) -> str: + return " {app!r}>".format( + prefix=self._prefix, app=self._app + ) + + +class AbstractRuleMatching(abc.ABC): + @abc.abstractmethod # pragma: no branch + async def match(self, request: Request) -> bool: + """Return bool if the request satisfies the criteria""" + + @abc.abstractmethod # pragma: no branch + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @property + @abc.abstractmethod # pragma: no branch + def canonical(self) -> str: + """Return a str""" + + +class Domain(AbstractRuleMatching): + re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(? None: + super().__init__() + self._domain = self.validation(domain) + + @property + def canonical(self) -> str: + return self._domain + + def validation(self, domain: str) -> str: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + domain = domain.rstrip(".").lower() + if not domain: + raise ValueError("Domain cannot be empty") + elif "://" in domain: + raise ValueError("Scheme not supported") + url = URL("http://" + domain) + assert url.raw_host is not None + if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")): + raise ValueError("Domain not valid") + if url.port == 80: + return url.raw_host + return f"{url.raw_host}:{url.port}" + + async def match(self, request: Request) -> bool: + host = request.headers.get(hdrs.HOST) + if not host: + return False + return self.match_domain(host) + + def match_domain(self, host: str) -> bool: + return host.lower() == self._domain + + def get_info(self) -> _InfoDict: + return {"domain": self._domain} + + +class MaskDomain(Domain): + re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(? 
None: + super().__init__(domain) + mask = self._domain.replace(".", r"\.").replace("*", ".*") + self._mask = re.compile(mask) + + @property + def canonical(self) -> str: + return self._mask.pattern + + def match_domain(self, host: str) -> bool: + return self._mask.fullmatch(host) is not None + + +class MatchedSubAppResource(PrefixedSubAppResource): + def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None: + AbstractResource.__init__(self) + self._prefix = "" + self._app = app + self._rule = rule + + @property + def canonical(self) -> str: + return self._rule.canonical + + def get_info(self) -> _InfoDict: + return {"app": self._app, "rule": self._rule} + + async def resolve(self, request: Request) -> _Resolve: + if not await self._rule.match(request): + return None, set() + match_info = await self._app.router.resolve(request) + match_info.add_app(self._app) + if isinstance(match_info.http_exception, HTTPMethodNotAllowed): + methods = match_info.http_exception.allowed_methods + else: + methods = set() + return match_info, methods + + def __repr__(self) -> str: + return " {app!r}>" "".format(app=self._app) + + +class ResourceRoute(AbstractRoute): + """A route with resource""" + + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + resource: AbstractResource, + *, + expect_handler: Optional[_ExpectHandler] = None, + ) -> None: + super().__init__( + method, handler, expect_handler=expect_handler, resource=resource + ) + + def __repr__(self) -> str: + return " {handler!r}".format( + method=self.method, resource=self._resource, handler=self.handler + ) + + @property + def name(self) -> Optional[str]: + if self._resource is None: + return None + return self._resource.name + + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + assert self._resource is not None + return self._resource.url_for(*args, **kwargs) + + def get_info(self) -> _InfoDict: + assert self._resource is not None + return self._resource.get_info() + + +class SystemRoute(AbstractRoute): + def __init__(self, http_exception: HTTPException) -> None: + super().__init__(hdrs.METH_ANY, self._handle) + self._http_exception = http_exception + + def url_for(self, *args: str, **kwargs: str) -> URL: + raise RuntimeError(".url_for() is not allowed for SystemRoute") + + @property + def name(self) -> Optional[str]: + return None + + def get_info(self) -> _InfoDict: + return {"http_exception": self._http_exception} + + async def _handle(self, request: Request) -> StreamResponse: + raise self._http_exception + + @property + def status(self) -> int: + return self._http_exception.status + + @property + def reason(self) -> str: + return self._http_exception.reason + + def __repr__(self) -> str: + return "".format(self=self) + + +class View(AbstractView): + async def _iter(self) -> StreamResponse: + if self.request.method not in hdrs.METH_ALL: + self._raise_allowed_methods() + method: Callable[[], Awaitable[StreamResponse]] = getattr( + self, self.request.method.lower(), None + ) + if method is None: + self._raise_allowed_methods() + resp = await method() + return resp + + def __await__(self) -> Generator[Any, None, StreamResponse]: + return self._iter().__await__() + + def _raise_allowed_methods(self) -> None: + allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())} + raise HTTPMethodNotAllowed(self.request.method, allowed_methods) + + +class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]): + 
def __init__(self, resources: List[AbstractResource]) -> None: + self._resources = resources + + def __len__(self) -> int: + return len(self._resources) + + def __iter__(self) -> Iterator[AbstractResource]: + yield from self._resources + + def __contains__(self, resource: object) -> bool: + return resource in self._resources + + +class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): + def __init__(self, resources: List[AbstractResource]): + self._routes: List[AbstractRoute] = [] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + yield from self._routes + + def __contains__(self, route: object) -> bool: + return route in self._routes + + +class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): + + NAME_SPLIT_RE = re.compile(r"[.:-]") + + def __init__(self) -> None: + super().__init__() + self._resources: List[AbstractResource] = [] + self._named_resources: Dict[str, AbstractResource] = {} + + async def resolve(self, request: Request) -> UrlMappingMatchInfo: + method = request.method + allowed_methods: Set[str] = set() + + for resource in self._resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) + else: + return MatchInfoError(HTTPNotFound()) + + def __iter__(self) -> Iterator[str]: + return iter(self._named_resources) + + def __len__(self) -> int: + return len(self._named_resources) + + def __contains__(self, resource: object) -> bool: + return resource in self._named_resources + + def __getitem__(self, name: str) -> AbstractResource: + return self._named_resources[name] + + def resources(self) -> ResourcesView: + return ResourcesView(self._resources) + + def routes(self) -> RoutesView: + return RoutesView(self._resources) + + def named_resources(self) -> Mapping[str, AbstractResource]: + return MappingProxyType(self._named_resources) + + def register_resource(self, resource: AbstractResource) -> None: + assert isinstance( + resource, AbstractResource + ), f"Instance of AbstractResource class is required, got {resource!r}" + if self.frozen: + raise RuntimeError("Cannot register a resource into frozen router.") + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if keyword.iskeyword(part): + raise ValueError( + f"Incorrect route name {name!r}, " + "python keywords cannot be used " + "for route name" + ) + if not part.isidentifier(): + raise ValueError( + "Incorrect route name {!r}, " + "the name should be a sequence of " + "python identifiers separated " + "by dash, dot or column".format(name) + ) + if name in self._named_resources: + raise ValueError( + "Duplicate {!r}, " + "already handled by {!r}".format(name, self._named_resources[name]) + ) + self._named_resources[name] = resource + self._resources.append(resource) + + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: + if path and not path.startswith("/"): + raise ValueError("path should be started with / or be empty") + # Reuse last added resource if path and name are the same + if self._resources: + resource = self._resources[-1] + if resource.name == name and resource.raw_match(path): + return cast(Resource, resource) + if not ("{" in path or "}" in path or 
ROUTE_RE.search(path)): + resource = PlainResource(_requote_path(path), name=name) + self.register_resource(resource) + return resource + resource = DynamicResource(path, name=name) + self.register_resource(resource) + return resource + + def add_route( + self, + method: str, + path: str, + handler: Union[Handler, Type[AbstractView]], + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + ) -> AbstractRoute: + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, expect_handler=expect_handler) + + def add_static( + self, + prefix: str, + path: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> AbstractResource: + """Add static files view. + + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith("/") + if prefix.endswith("/"): + prefix = prefix[:-1] + resource = StaticResource( + prefix, + path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version, + ) + self.register_resource(resource) + return resource + + def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method HEAD.""" + return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) + + def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method OPTIONS.""" + return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + def add_get( + self, + path: str, + handler: Handler, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, + ) -> AbstractRoute: + """Shortcut for add_route with method GET. + + If allow_head is true, another + route is added allowing head requests to the same endpoint. + """ + resource = self.add_resource(path, name=name) + if allow_head: + resource.add_route(hdrs.METH_HEAD, handler, **kwargs) + return resource.add_route(hdrs.METH_GET, handler, **kwargs) + + def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method POST.""" + return self.add_route(hdrs.METH_POST, path, handler, **kwargs) + + def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PUT.""" + return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) + + def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PATCH.""" + return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) + + def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method DELETE.""" + return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) + + def add_view( + self, path: str, handler: Type[AbstractView], **kwargs: Any + ) -> AbstractRoute: + """Shortcut for add_route with ANY methods for a class-based view.""" + return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) + + def freeze(self) -> None: + super().freeze() + for resource in self._resources: + resource.freeze() + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + """Append routes to route table. + + Parameter should be a sequence of RouteDef objects. 
+ + Returns a list of registered AbstractRoute instances. + """ + registered_routes = [] + for route_def in routes: + registered_routes.extend(route_def.register(self)) + return registered_routes + + +def _quote_path(value: str) -> str: + if YARL_VERSION < (1, 6): + value = value.replace("%", "%25") + return URL.build(path=value, encoded=False).raw_path + + +def _unquote_path(value: str) -> str: + return URL.build(path=value, encoded=True).path + + +def _requote_path(value: str) -> str: + # Quote non-ascii characters and other characters which must be quoted, + # but preserve existing %-sequences. + result = _quote_path(value) + if "%" in value: + result = result.replace("%25", "%") + return result diff --git a/venv/lib/python3.8/site-packages/aiohttp/web_ws.py b/venv/lib/python3.8/site-packages/aiohttp/web_ws.py new file mode 100644 index 0000000..0d32a21 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/web_ws.py @@ -0,0 +1,487 @@ +import asyncio +import base64 +import binascii +import hashlib +import json +from typing import Any, Iterable, Optional, Tuple, cast + +import async_timeout +import attr +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WS_KEY, + WebSocketError, + WebSocketReader, + WebSocketWriter, + WSCloseCode, + WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen, + ws_ext_parse, +) +from .log import ws_logger +from .streams import EofStream, FlowControlDataQueue +from .typedefs import Final, JSONDecoder, JSONEncoder +from .web_exceptions import HTTPBadRequest, HTTPException +from .web_request import BaseRequest +from .web_response import StreamResponse + +__all__ = ( + "WebSocketResponse", + "WebSocketReady", + "WSMsgType", +) + +THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class WebSocketReady: + ok: bool + protocol: Optional[str] + + def __bool__(self) -> bool: + return self.ok + + +class WebSocketResponse(StreamResponse): + + _length_check = False + + def __init__( + self, + *, + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + protocols: Iterable[str] = (), + compress: bool = True, + max_msg_size: int = 4 * 1024 * 1024, + ) -> None: + super().__init__(status=101) + self._protocols = protocols + self._ws_protocol: Optional[str] = None + self._writer: Optional[WebSocketWriter] = None + self._reader: Optional[FlowControlDataQueue[WSMessage]] = None + self._closed = False + self._closing = False + self._conn_lost = 0 + self._close_code: Optional[int] = None + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._waiting: Optional[asyncio.Future[bool]] = None + self._exception: Optional[BaseException] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._compress = compress + self._max_msg_size = max_msg_size + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + 
self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + assert self._loop is not None + self._heartbeat_cb = call_later( + self._send_heartbeat, self._heartbeat, self._loop + ) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + assert self._loop is not None + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. + self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, self._pong_heartbeat, self._loop + ) + + def _pong_not_received(self) -> None: + if self._req is not None and self._req.transport is not None: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + self._req.transport.close() + + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: + # make pre-check to don't hide it by do_handshake() exceptions + if self._payload_writer is not None: + return self._payload_writer + + protocol, writer = self._pre_start(request) + payload_writer = await super().prepare(request) + assert payload_writer is not None + self._post_start(request, protocol, writer) + await payload_writer.drain() + return payload_writer + + def _handshake( + self, request: BaseRequest + ) -> Tuple["CIMultiDict[str]", str, bool, bool]: + headers = request.headers + if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): + raise HTTPBadRequest( + text=( + "No WebSocket UPGRADE hdr: {}\n Can " + '"Upgrade" only to "WebSocket".' + ).format(headers.get(hdrs.UPGRADE)) + ) + + if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): + raise HTTPBadRequest( + text="No CONNECTION upgrade hdr: {}".format( + headers.get(hdrs.CONNECTION) + ) + ) + + # find common sub-protocol between client and server + protocol = None + if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: + req_protocols = [ + str(proto.strip()) + for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in req_protocols: + if proto in self._protocols: + protocol = proto + break + else: + # No overlap found: Return no protocol as per spec + ws_logger.warning( + "Client protocols %r don’t overlap server-known ones %r", + req_protocols, + self._protocols, + ) + + # check supported version + version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") + if version not in ("13", "8", "7"): + raise HTTPBadRequest(text=f"Unsupported version: {version}") + + # check client handshake for validity + key = headers.get(hdrs.SEC_WEBSOCKET_KEY) + try: + if not key or len(base64.b64decode(key)) != 16: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") + except binascii.Error: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None + + accept_val = base64.b64encode( + hashlib.sha1(key.encode() + WS_KEY).digest() + ).decode() + response_headers = CIMultiDict( + { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, + } + ) + + notakeover = False + compress = 0 + if self._compress: + extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + # Server side always get return with no exception. 
+ # If something happened, just drop compress extension + compress, notakeover = ws_ext_parse(extensions, isserver=True) + if compress: + enabledext = ws_ext_gen( + compress=compress, isserver=True, server_notakeover=notakeover + ) + response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext + + if protocol: + response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol + return ( + response_headers, + protocol, + compress, + notakeover, + ) # type: ignore[return-value] + + def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: + self._loop = request._loop + + headers, protocol, compress, notakeover = self._handshake(request) + + self.set_status(101) + self.headers.update(headers) + self.force_close() + self._compress = compress + transport = request._protocol.transport + assert transport is not None + writer = WebSocketWriter( + request._protocol, transport, compress=compress, notakeover=notakeover + ) + + return protocol, writer + + def _post_start( + self, request: BaseRequest, protocol: str, writer: WebSocketWriter + ) -> None: + self._ws_protocol = protocol + self._writer = writer + + self._reset_heartbeat() + + loop = self._loop + assert loop is not None + self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop) + request.protocol.set_parser( + WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) + ) + # disable HTTP keepalive for WebSocket + request.protocol.keep_alive(False) + + def can_prepare(self, request: BaseRequest) -> WebSocketReady: + if self._writer is not None: + raise RuntimeError("Already started") + try: + _, protocol, _, _ = self._handshake(request) + except HTTPException: + return WebSocketReady(False, None) + else: + return WebSocketReady(True, protocol) + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def ws_protocol(self) -> Optional[str]: + return self._ws_protocol + + @property + def compress(self) -> bool: + return self._compress + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + # unsolicited pong + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[bool] = None, + *, + dumps: JSONEncoder = json.dumps, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def write_eof(self) -> None: # type: ignore[override] + if self._eof_sent: + return + if self._payload_writer is None: + raise RuntimeError("Response has not been started") + + await self.close() + 
self._eof_sent = True + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + + self._cancel_heartbeat() + reader = self._reader + assert reader is not None + + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._closed = True + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + return True + + if self._closing: + return True + + reader = self._reader + assert reader is not None + try: + async with async_timeout.timeout(self._timeout): + msg = await reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + return True + + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + if self._reader is None: + raise RuntimeError("Call .prepare() first") + + loop = self._loop + assert loop is not None + while True: + if self._waiting is not None: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + self._conn_lost += 1 + if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: + raise RuntimeError("WebSocket connection is closed.") + return WS_CLOSED_MESSAGE + elif self._closing: + return WS_CLOSING_MESSAGE + + try: + self._waiting = loop.create_future() + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + set_result(waiter, True) + self._waiting = None + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not WSMsgType.TEXT".format( + msg.type, msg.data + ) + ) + return cast(str, msg.data) + + async def 
receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + async def write(self, data: bytes) -> None: + raise RuntimeError("Cannot call .write() for websocket") + + def __aiter__(self) -> "WebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg + + def _cancel(self, exc: BaseException) -> None: + if self._reader is not None: + self._reader.set_exception(exc) diff --git a/venv/lib/python3.8/site-packages/aiohttp/worker.py b/venv/lib/python3.8/site-packages/aiohttp/worker.py new file mode 100644 index 0000000..f130289 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiohttp/worker.py @@ -0,0 +1,269 @@ +"""Async gunicorn worker for aiohttp.web""" + +import asyncio +import os +import re +import signal +import sys +from types import FrameType +from typing import Any, Awaitable, Callable, Optional, Union # noqa + +from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat +from gunicorn.workers import base + +from aiohttp import web + +from .helpers import set_result +from .web_app import Application +from .web_log import AccessLogger + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker") + + +class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] + + DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT + DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default + + def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover + super().__init__(*args, **kw) + + self._task: Optional[asyncio.Task[None]] = None + self.exit_code = 0 + self._notify_waiter: Optional[asyncio.Future[bool]] = None + + def init_process(self) -> None: + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self) -> None: + self._task = self.loop.create_task(self._run()) + + try: # ignore all finalization problems + self.loop.run_until_complete(self._task) + except Exception: + self.log.exception("Exception in gunicorn worker") + self.loop.run_until_complete(self.loop.shutdown_asyncgens()) + self.loop.close() + + sys.exit(self.exit_code) + + async def _run(self) -> None: + runner = None + if isinstance(self.wsgi, Application): + app = self.wsgi + elif asyncio.iscoroutinefunction(self.wsgi): + wsgi = await self.wsgi() + if isinstance(wsgi, web.AppRunner): + runner = wsgi + app = runner.app + else: + app = wsgi + else: + raise RuntimeError( + "wsgi app should be either Application or " + "async function returning Application, got {}".format(self.wsgi) + ) + + if runner is None: + access_log = self.log.access_log if self.cfg.accesslog else None + runner = web.AppRunner( + app, + logger=self.log, + keepalive_timeout=self.cfg.keepalive, + access_log=access_log, + 
access_log_format=self._get_valid_log_format( + self.cfg.access_log_format + ), + ) + await runner.setup() + + ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None + + runner = runner + assert runner is not None + server = runner.server + assert server is not None + for sock in self.sockets: + site = web.SockSite( + runner, + sock, + ssl_context=ctx, + shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, + ) + await site.start() + + # If our parent changed then we shut down. + pid = os.getpid() + try: + while self.alive: # type: ignore[has-type] + self.notify() + + cnt = server.requests_count + if self.cfg.max_requests and cnt > self.cfg.max_requests: + self.alive = False + self.log.info("Max requests, shutting down: %s", self) + + elif pid == os.getpid() and self.ppid != os.getppid(): + self.alive = False + self.log.info("Parent changed, shutting down: %s", self) + else: + await self._wait_next_notify() + except BaseException: + pass + + await runner.cleanup() + + def _wait_next_notify(self) -> "asyncio.Future[bool]": + self._notify_waiter_done() + + loop = self.loop + assert loop is not None + self._notify_waiter = waiter = loop.create_future() + self.loop.call_later(1.0, self._notify_waiter_done, waiter) + + return waiter + + def _notify_waiter_done( + self, waiter: Optional["asyncio.Future[bool]"] = None + ) -> None: + if waiter is None: + waiter = self._notify_waiter + if waiter is not None: + set_result(waiter, True) + + if waiter is self._notify_waiter: + self._notify_waiter = None + + def init_signals(self) -> None: + # Set up signals through the event loop API. + + self.loop.add_signal_handler( + signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None + ) + + self.loop.add_signal_handler( + signal.SIGTERM, self.handle_exit, signal.SIGTERM, None + ) + + self.loop.add_signal_handler( + signal.SIGINT, self.handle_quit, signal.SIGINT, None + ) + + self.loop.add_signal_handler( + signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None + ) + + self.loop.add_signal_handler( + signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None + ) + + self.loop.add_signal_handler( + signal.SIGABRT, self.handle_abort, signal.SIGABRT, None + ) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + # Reset signals so Gunicorn doesn't swallow subprocess return codes + # See: https://github.com/aio-libs/aiohttp/issues/6130 + if sys.version_info < (3, 8): + # Starting from Python 3.8, + # the default child watcher is ThreadedChildWatcher. + # The watcher doesn't depend on SIGCHLD signal, + # there is no need to reset it. + signal.signal(signal.SIGCHLD, signal.SIG_DFL) + + def handle_quit(self, sig: int, frame: FrameType) -> None: + self.alive = False + + # worker_int callback + self.cfg.worker_int(self) + + # wakeup closing process + self._notify_waiter_done() + + def handle_abort(self, sig: int, frame: FrameType) -> None: + self.alive = False + self.exit_code = 1 + self.cfg.worker_abort(self) + sys.exit(1) + + @staticmethod + def _create_ssl_context(cfg: Any) -> "SSLContext": + """Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. 
+ """ + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + def _get_valid_log_format(self, source_format: str) -> str: + if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: + return self.DEFAULT_AIOHTTP_LOG_FORMAT + elif re.search(r"%\([^\)]+\)", source_format): + raise ValueError( + "Gunicorn's style options in form of `%(name)s` are not " + "supported for the log formatting. Please use aiohttp's " + "format specification to configure access log formatting: " + "http://docs.aiohttp.org/en/stable/logging.html" + "#format-specification" + ) + else: + return source_format + + +class GunicornUVLoopWebWorker(GunicornWebWorker): + def init_process(self) -> None: + import uvloop + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup uvloop policy, so that every + # asyncio.get_event_loop() will create an instance + # of uvloop event loop. + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + super().init_process() + + +class GunicornTokioWebWorker(GunicornWebWorker): + def init_process(self) -> None: # pragma: no cover + import tokio + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup tokio policy, so that every + # asyncio.get_event_loop() will create an instance + # of tokio event loop. + asyncio.set_event_loop_policy(tokio.EventLoopPolicy()) + + super().init_process() diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/LICENSE b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/LICENSE new file mode 120000 index 0000000..59071a4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6f/d5/24/3e92dd7f98ec69c7ac377728e74905709ff527a5bf98d6d0263c04f5b6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/METADATA b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/METADATA new file mode 120000 index 0000000..8e322c3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fe/6a/4d/291b38ecbb23780bfb1efd0ba5ec5f1621f507be1e3ad2610c0f30ebe2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/RECORD b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/RECORD new file mode 100644 index 0000000..212eb38 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/RECORD @@ -0,0 +1,12 @@ +aiosignal-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiosignal-1.2.0.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 +aiosignal-1.2.0.dist-info/METADATA,sha256=_mpNKRs47LsjeAv7Hv0LpexfFiH1B74eOtJhDA8w6-I,5500 +aiosignal-1.2.0.dist-info/RECORD,, +aiosignal-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+aiosignal-1.2.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +aiosignal-1.2.0.dist-info/direct_url.json,sha256=HJKHnDucPL9GR6IrHuKgaZJGwLz8lLWRfg3YZ54KX3Y,251 +aiosignal-1.2.0.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10 +aiosignal/__init__.py,sha256=MGQyLD_0xNqLqPpBqxPj0CHKQjCIY1UFUK5PtHvCWbU,867 +aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311 +aiosignal/__pycache__/__init__.cpython-38.pyc,, +aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/WHEEL new file mode 120000 index 0000000..e14c71b --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7b/0c/04/b9e8a8d42d977874ef4f5ee7f1d6542603afc82582b7459534b0a53fda \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/direct_url.json new file mode 100644 index 0000000..db2d5c9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, "url": "https://files.pythonhosted.org/packages/3b/87/fe94898f2d44a93a35d5aa74671ed28094d80753a1113d68b799fab6dc22/aiosignal-1.2.0-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/top_level.txt new file mode 120000 index 0000000..c504aae --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal-1.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/8e/5a/34e2860ddac8d6ba2a658dc1197436da42453c706655dc2d2d82ed5c2d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal/__init__.py b/venv/lib/python3.8/site-packages/aiosignal/__init__.py new file mode 120000 index 0000000..da3facb --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/30/64/32/2c3ff4c4da8ba8fa41ab13e3d021ca42308863550550ae4fb47bc259b5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal/__init__.pyi b/venv/lib/python3.8/site-packages/aiosignal/__init__.pyi new file mode 120000 index 0000000..8409183 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal/__init__.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c5/e0/9d/758492f1f640933f12e07b8a491d880dedcded15bf2ca7170cf3c0d579 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/aiosignal/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/aiosignal/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..b4e4ae0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/aiosignal/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/aiosignal/py.typed b/venv/lib/python3.8/site-packages/aiosignal/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/aiosignal/py.typed @@ -0,0 
+1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/LICENSE b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/LICENSE new file mode 120000 index 0000000..d05fead --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e1/8d/7b/b8f513e2c46bb585c94b585bd30720dd3ccb21ddb0786f72d16658f92c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/METADATA new file mode 120000 index 0000000..238b842 --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/da/97/cc/c7106cb79bd0ed27cccb94c36835347118024906bbc1c0f9788f84c3ef \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/RECORD new file mode 100644 index 0000000..3babddb --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/RECORD @@ -0,0 +1,12 @@ +async_timeout-4.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +async_timeout-4.0.2.dist-info/LICENSE,sha256=4Y17uPUT4sRrtYXJS1hb0wcg3TzLId2weG9y0WZY-Sw,568 +async_timeout-4.0.2.dist-info/METADATA,sha256=2pfMxxBst5vQ7SfMy5TDaDU0cRgCSQa7wcD5eI-Ew-8,4193 +async_timeout-4.0.2.dist-info/RECORD,, +async_timeout-4.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +async_timeout-4.0.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +async_timeout-4.0.2.dist-info/direct_url.json,sha256=fchEz9IDpBGqOkwf5O27BGwXF_Fk32n7L53Vw_5vKj0,255 +async_timeout-4.0.2.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14 +async_timeout-4.0.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +async_timeout/__init__.py,sha256=N-JUI_VExhHnO0emkF_-h08dl4HBgOje16N4Ci-W-go,7487 +async_timeout/__pycache__/__init__.cpython-38.pyc,, +async_timeout/py.typed,sha256=tyozzRT1fziXETDxokmuyt6jhOmtjUbnVNJdZcG7ik0,12 diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/WHEEL new file mode 120000 index 0000000..e14c71b --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7b/0c/04/b9e8a8d42d977874ef4f5ee7f1d6542603afc82582b7459534b0a53fda \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/direct_url.json new file mode 100644 index 0000000..e462143 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, "url": "https://files.pythonhosted.org/packages/d6/c1/8991e7c5385b897b8c020cdaad718c5b087a6626d1d11a23e1ea87e325a7/async_timeout-4.0.2-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/top_level.txt new file mode 120000 index 0000000..83c3f15 --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f6/83/38/7bb4f0abc883ffbfd0dcccf413a18f201eaf26f445a3e501c1442d0435 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/zip-safe b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/zip-safe new file mode 120000 index 0000000..e11f061 --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout-4.0.2.dist-info/zip-safe @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/01/ba/47/19c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout/__init__.py b/venv/lib/python3.8/site-packages/async_timeout/__init__.py new file mode 120000 index 0000000..6e7cc21 --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/37/e2/54/23f544c611e73b47a6905ffe874f1d9781c180e8ded7a3780a2f96fa0a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/async_timeout/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/async_timeout/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..bcb2aad Binary files /dev/null and b/venv/lib/python3.8/site-packages/async_timeout/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/async_timeout/py.typed b/venv/lib/python3.8/site-packages/async_timeout/py.typed new file mode 120000 index 0000000..e0b8f5a --- /dev/null +++ b/venv/lib/python3.8/site-packages/async_timeout/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b7/2a/33/cd14f57f38971130f1a249aecadea384e9ad8d46e754d25d65c1bb8a4d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/__init__.py b/venv/lib/python3.8/site-packages/attr/__init__.py new file mode 100644 index 0000000..386305d --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/__init__.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + + +import sys + +from functools import partial + +from . 
import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "22.1.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "evolve", + "exceptions", + "fields", + "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable # noqa: F401 + + __all__.extend(("define", "field", "frozen", "mutable")) diff --git a/venv/lib/python3.8/site-packages/attr/__init__.pyi b/venv/lib/python3.8/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..03cc4c8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/__init__.pyi @@ -0,0 +1,486 @@ +import sys + +from typing import ( + Any, + Callable, + ClassVar, + Dict, + Generic, + List, + Mapping, + Optional, + Protocol, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._cmp import cmp_using as cmp_using +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List[Attribute[Any]]], List[Attribute[Any]] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# A protocol to be able to statically accept an attrs class. 
+class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... 
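# A minimal sketch (not part of the vendored stub) of the typing trick the
# comments above describe: the stub deliberately reports attr.ib(8) as
# returning an int, so annotated assignments type-check cleanly. The Retry
# class and its fields are assumptions made up for demonstration only.
import attr

@attr.s
class Retry:
    attempts: int = attr.ib(8)             # stub infers int from the default
    backoff: float = attr.ib(default=0.5)  # explicit keyword form, same effect

print(Retry())  # Retry(attempts=8, backoff=0.5)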
+ +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... 
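# A minimal sketch (not part of the vendored stub) of the next-generation API
# whose overloads appear above: attr.define builds slotted, auto-attribs
# classes by default, and the mutable/frozen aliases further below only change
# the defaults. The Point class is an assumption made up for demonstration only.
import attr

@attr.define
class Point:
    x: int
    y: int = 0

p = Point(1)
print(attr.evolve(p, y=2))  # Point(x=1, y=2) -- a new instance, p is unchanged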
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +def fields(cls: Type[AttrsInstance]) -> Any: ... +def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
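# A minimal sketch (not part of the vendored stub) of the validator switches
# stubbed directly above. They are process-wide toggles; the _config.py module
# later in this diff notes they are deprecated in favour of
# attrs.validators.set_disabled()/get_disabled(). The Port class is an
# assumption made up for demonstration only.
import attr

@attr.s
class Port:
    number = attr.ib(validator=attr.validators.instance_of(int))

attr.set_run_validators(False)  # globally skip validators
Port("not-an-int")              # accepted without raising while disabled
attr.set_run_validators(True)   # restore the default behaviour
assert attr.get_run_validators() is True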
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c659529 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_cmp.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_cmp.cpython-38.pyc new file mode 100644 index 0000000..15b4223 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_cmp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000..61e16d3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_config.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_config.cpython-38.pyc new file mode 100644 index 0000000..eea44d5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_funcs.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_funcs.cpython-38.pyc new file mode 100644 index 0000000..e234802 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_funcs.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_make.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_make.cpython-38.pyc new file mode 100644 index 0000000..e8b3165 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_make.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_next_gen.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_next_gen.cpython-38.pyc new file mode 100644 index 0000000..e0f44b6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_next_gen.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/_version_info.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/_version_info.cpython-38.pyc new file mode 100644 index 0000000..9b7a445 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/_version_info.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/converters.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/converters.cpython-38.pyc new file mode 100644 index 0000000..c420e4b Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/converters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..34250bd Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/filters.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/filters.cpython-38.pyc new file mode 100644 index 0000000..4a9c7b3 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/attr/__pycache__/filters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/setters.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/setters.cpython-38.pyc new file mode 100644 index 0000000..083d12d Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/setters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/__pycache__/validators.cpython-38.pyc b/venv/lib/python3.8/site-packages/attr/__pycache__/validators.cpython-38.pyc new file mode 100644 index 0000000..c4cd0b9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attr/__pycache__/validators.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attr/_cmp.py b/venv/lib/python3.8/site-packages/attr/_cmp.py new file mode 100644 index 0000000..81b99e4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_cmp.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. 
+ raise ValueError( + "eq must be define is order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = "__%s__" % (name,) + method.__doc__ = "Return a %s b. Computed by attrs." % ( + _operation_names[name], + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/venv/lib/python3.8/site-packages/attr/_cmp.pyi b/venv/lib/python3.8/site-packages/attr/_cmp.pyi new file mode 100644 index 0000000..35437ef --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable, Optional, Type + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: Optional[_CompareWithType], + lt: Optional[_CompareWithType], + le: Optional[_CompareWithType], + gt: Optional[_CompareWithType], + ge: Optional[_CompareWithType], + require_same_type: bool, + class_name: str, +) -> Type: ... diff --git a/venv/lib/python3.8/site-packages/attr/_compat.py b/venv/lib/python3.8/site-packages/attr/_compat.py new file mode 100644 index 0000000..5826493 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_compat.py @@ -0,0 +1,185 @@ +# SPDX-License-Identifier: MIT + + +import inspect +import platform +import sys +import threading +import types +import warnings + +from collections.abc import Mapping, Sequence # noqa + + +PYPY = platform.python_implementation() == "PyPy" +PY36 = sys.version_info[:2] >= (3, 6) +HAS_F_STRINGS = PY36 +PY310 = sys.version_info[:2] >= (3, 10) + + +if PYPY or PY36: + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +def just_warn(*args, **kw): + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. 
+ """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + + def set_closure_cell(cell, value): + cell.cell_contents = value + + else: + args = [co.co_argcount] + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. 
+repr_context = threading.local() diff --git a/venv/lib/python3.8/site-packages/attr/_config.py b/venv/lib/python3.8/site-packages/attr/_config.py new file mode 100644 index 0000000..96d4200 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/venv/lib/python3.8/site-packages/attr/_funcs.py b/venv/lib/python3.8/site-packages/attr/_funcs.py new file mode 100644 index 0000000..a982d7c --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_funcs.py @@ -0,0 +1,420 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+ :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in v.items() + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in changes.items(): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. 
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/venv/lib/python3.8/site-packages/attr/_make.py b/venv/lib/python3.8/site-packages/attr/_make.py new file mode 100644 index 0000000..4d1afe3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_make.py @@ -0,0 +1,3006 @@ +# SPDX-License-Identifier: MIT + +import copy +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + HAS_F_STRINGS, + PY310, + PYPY, + _AnnotationExtractor, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. 
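# A minimal sketch (not part of the vendored module) of resolve_types() as
# documented above: string/forward annotations stay raw on Attribute.type
# until they are resolved explicitly. The Node class is an assumption made up
# for demonstration only.
import typing
import attr

@attr.define
class Node:
    children: "typing.List[Node]" = attr.field(factory=list)

print(attr.fields(Node).children.type)   # the raw string 'typing.List[Node]'
attr.resolve_types(Node, globals(), locals())
print(attr.fields(Node).children.type)   # the resolved typing.List[Node] object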
+_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing: + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super().__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. 
+ + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. To override how the attribute value is compared, + pass a ``callable`` that takes a single value and returns the value + to be compared. + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + To override how the attribute value is ordered, + pass a ``callable`` that takes a single value and returns the value + to be ordered. + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + :type cmp: a `bool` or a `callable`. + + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see :pep:`526`). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. + Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. 
deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs): + """ + Create the method with the script given and return the method object. + """ + locs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = "{}-{}>".format(base_filename[:-1], count) + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in these.items()] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. 
+ """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). 
+ if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _obj_setattr + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _obj_setattr + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. 
Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = "Method generated by attrs for class %s." % ( + self._cls.__qualname__, + ) + except AttributeError: + pass + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. 
+ if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. 
+ :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + an `attrs.exceptions.PythonTooOldError`. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* + and *order* to the same value. Must not be mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the argument + name. If a ``__attrs_pre_init__`` method exists on the class, it will + be called before the class is initialized. If a ``__attrs_post_init__`` + method exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be + injected instead. 
This allows you to define a custom ``__init__`` + method that can do pre-init work such as ``super().__init__()``, + and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the default + dict classes, but have some gotchas you should know about, so we + encourage you to read the `glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated + attributes (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + `validators `), you still *must* assign `attr.ib` to + them. Otherwise Python will either not find the name or try to use + the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. + :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. 
``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + :pep:`634` (Structural Pattern Matching). It is a tuple of all + non-keyword-only ``__init__`` parameter names on Python 3.10 and later. + Ignored on older Python versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. 
deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + """ + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + else: + hash = hash_ + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. 
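+    # A short sketch of both spellings (illustrative):
+    #
+    #     @attrs                   # bare decorator: maybe_cls is the class,
+    #     class A:                 # so wrap(A) runs immediately
+    #         x = attrib()
+    #
+    #     @attrs(frozen=True)      # called form: maybe_cls is None and the
+    #     class B:                 # returned ``wrap`` is applied to B
+    #         x = attrib()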
+ if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ is _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + ) + return unique_filename + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + " %d," % (type_hash,), + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + globs[cmp_name] = a.eq_key + method_lines.append( + indent + " %s(self.%s)," % (cmp_name, a.name) + ) + else: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename, globs) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. 
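+    # Concrete illustration (standard CPython float semantics):
+    #
+    #     nan = float("nan")
+    #     nan == nan        # False -- NaN is never equal to itself
+    #     (nan,) == (nan,)  # True  -- tuple comparison short-circuits on identity
+    #
+    # so comparing attribute-by-attribute and comparing as tuples can disagree.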
+ globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + " %s(self.%s)," + % ( + cmp_name, + a.name, + ) + ) + others.append( + " %s(other.%s)," + % ( + cmp_name, + a.name, + ) + ) + else: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +if HAS_F_STRINGS: + + def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r + for name, r, _ in attr_names_with_reprs + if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." 
+ name + if i + else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = ( + '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + ) + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + +else: + + def _make_repr(attrs, ns, _): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the + full name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + already_repring = _compat.repr_context.already_repring + except AttributeError: + already_repring = set() + _compat.repr_context.already_repring = already_repring + + if id(self) in already_repring: + return "..." + real_cls = self.__class__ + if ns is None: + class_name = real_cls.__qualname__.rsplit(">.", 1)[-1] + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) + # for the duration of this call, it's safe to depend on id(...) + # stability, and not need to track the instance and therefore + # worry about properties like weakref- or hash-ability. + already_repring.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + already_repring.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. 
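+    A minimal usage sketch (``Attribute`` reprs shortened for brevity)::
+
+        @attr.s
+        class C:
+            x = attr.ib()
+            y = attr.ib()
+
+        fields(C)      # (Attribute(name='x', ...), Attribute(name='y', ...))
+        fields(C).x    # the same Attribute, looked up by name
+        fields(C)[0]   # ... or by index
+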
+ """ + if not isinstance(cls, type): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attrs.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict((a.name, a) for a in attrs) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_setattr"] = _obj_setattr + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. 
+ """ + return "_setattr(self, '%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr(self, '%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. 
+ names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + arg_name = a.name.lstrip("_") + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = "%s=NOTHING" % (arg_name,) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append("if %s is not NOTHING:" % (arg_name,)) + + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None: + # Try to get the type from the converter. + t = _AnnotationExtractor(a.converter).get_first_param_type() + if t: + annotations[arg_name] = t + + if attrs_to_validate: # we can skip this if there are no validators. 
+ names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append( + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr(self, '%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." + a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + return ( + """\ +def {init_name}(self, {args}): + {lines} +""".format( + init_name=("__attrs_init__" if attrs_init else "__init__"), + args=args, + lines="\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +class Attribute: + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. 
+ hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict + ) + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + types.MappingProxyType(dict(value)) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. 
Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator: + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. 
versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + # Get return type from last converter. + rt = _AnnotationExtractor(converters[-1]).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt + + return pipe_converter diff --git a/venv/lib/python3.8/site-packages/attr/_next_gen.py b/venv/lib/python3.8/site-packages/attr/_next_gen.py new file mode 100644 index 0000000..5a06a74 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_next_gen.py @@ -0,0 +1,220 @@ +# SPDX-License-Identifier: MIT + +""" +These are Python 3.6+-only and keyword-only APIs that call `attr.s` and +`attr.ib` with different default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an ``attrs`` class. + + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` (c.f. + *auto_attribs* parameter). + - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* + + .. caution:: + + Usually this has only upsides and few visible effects in everyday + programming. But it *can* lead to some suprising behaviors, so please + make sure to read :term:`slotted classes`. + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. + + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. 
versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. 
versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/venv/lib/python3.8/site-packages/attr/_version_info.py b/venv/lib/python3.8/site-packages/attr/_version_info.py new file mode 100644 index 0000000..51a1312 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_version_info.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/venv/lib/python3.8/site-packages/attr/_version_info.pyi b/venv/lib/python3.8/site-packages/attr/_version_info.pyi new file mode 120000 index 0000000..3602b5f --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/_version_info.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c7/f3/37/2f75ae07baff50b5c05a3c7de7db9d290e072c1f25fa1b1cd450c636f9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/converters.py b/venv/lib/python3.8/site-packages/attr/converters.py new file mode 100644 index 0000000..a73626c --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/converters.py @@ -0,0 +1,144 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + + +import typing + +from ._compat import _AnnotationExtractor +from ._make import NOTHING, Factory, pipe + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. 
versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." + ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError("Cannot convert value to bool: {}".format(val)) diff --git a/venv/lib/python3.8/site-packages/attr/converters.pyi b/venv/lib/python3.8/site-packages/attr/converters.pyi new file mode 120000 index 0000000..2e6f6b7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/converters.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/31/0a/3b/884ccf355a05a4aa83df4b15c2056974da08792085da7b17be8c4b67e6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/exceptions.py b/venv/lib/python3.8/site-packages/attr/exceptions.py new file mode 100644 index 0000000..5dc51e0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/exceptions.py @@ -0,0 +1,92 @@ +# SPDX-License-Identifier: MIT + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. 
+ + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/venv/lib/python3.8/site-packages/attr/exceptions.pyi b/venv/lib/python3.8/site-packages/attr/exceptions.pyi new file mode 120000 index 0000000..7c64925 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/exceptions.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cd/9a/bc/6c2527280cbd983b41130e366613e1014207a1329e7434089c90fcf373 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/filters.py b/venv/lib/python3.8/site-packages/attr/filters.py new file mode 100644 index 0000000..baa25e9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/filters.py @@ -0,0 +1,51 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. 
+ + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/venv/lib/python3.8/site-packages/attr/filters.pyi b/venv/lib/python3.8/site-packages/attr/filters.pyi new file mode 120000 index 0000000..056fadb --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/filters.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fd/29/bc/d231b24844d7fc2973764a27e4d1d58d059197763796240a657d5ccd77 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/py.typed b/venv/lib/python3.8/site-packages/attr/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/setters.py b/venv/lib/python3.8/site-packages/attr/setters.py new file mode 100644 index 0000000..12ed675 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/setters.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# autodata stopped working, so the docstring is inlined in the API docs. +NO_OP = object() diff --git a/venv/lib/python3.8/site-packages/attr/setters.pyi b/venv/lib/python3.8/site-packages/attr/setters.pyi new file mode 120000 index 0000000..23e22c6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/setters.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ed/d3/35/d2baa94150d6d32fa22549eaf2b69b74ee56c7649ba392f035ad085e5d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attr/validators.py b/venv/lib/python3.8/site-packages/attr/validators.py new file mode 100644 index 0000000..eece517 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/validators.py @@ -0,0 +1,594 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. 
+ Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator: + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call. Valid options + are `re.fullmatch`, `re.search`, and `re.match`; the default ``None`` + means `re.fullmatch`. For performance reasons, the pattern is always + precompiled using `re.compile`. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
+ """ + valid_funcs = (re.fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + match_func = pattern.fullmatch + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator: + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator: + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator: + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ), + attr, + self.options, + value, + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... 
+ + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + .. versionchanged:: 22.1.0 + The ValueError was incomplete until now and only contained the human + readable error message. Now it contains all the information that has + been promised since 17.1.0. + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator: + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable: + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else " {iterable!r}".format(iterable=self.iterable_validator) + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator(s) to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + if isinstance(member_validator, (list, tuple)): + member_validator = and_(*member_validator) + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping: + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. 
+ + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator: + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator: + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(max=self.max_length) + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) + + +@attrs(repr=False, frozen=True, slots=True) +class _MinLengthValidator: + min_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) < self.min_length: + raise ValueError( + "Length of '{name}' must be => {min}: {len}".format( + name=attr.name, min=self.min_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(min=self.min_length) + + +def min_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is shorter than *length*. + + :param int length: Minimum length of the string or iterable + + .. 
versionadded:: 22.1.0 + """ + return _MinLengthValidator(length) diff --git a/venv/lib/python3.8/site-packages/attr/validators.pyi b/venv/lib/python3.8/site-packages/attr/validators.pyi new file mode 100644 index 0000000..54b9dba --- /dev/null +++ b/venv/lib/python3.8/site-packages/attr/validators.pyi @@ -0,0 +1,80 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType +from . import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/AUTHORS.rst b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/AUTHORS.rst new file mode 100644 index 0000000..aa677e8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/AUTHORS.rst @@ -0,0 +1,11 @@ +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? 
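The validator, converter, and setter hooks added in the files above are each documented in isolation; a minimal sketch of how they combine on one class may help. This is illustrative only and not part of the vendored diff: it assumes the attrs 22.1.0 API shown above (``attrs.define``, ``attrs.field``, ``attrs.validators``, ``attrs.converters``), and the ``User`` class and its field names are made up.

.. code-block:: python

    from typing import Optional

    from attrs import converters, define, field
    from attrs import validators as v

    @define
    class User:
        # A list of validators is wrapped in and_() and run in order.
        name: str = field(validator=[v.instance_of(str), v.min_len(1), v.max_len(40)])
        # matches_re() precompiles the pattern; the default matching function is re.fullmatch.
        email: str = field(validator=v.matches_re(r"[^@]+@[^@]+\.[^@]+"))
        # optional() lets None through; otherwise the wrapped validator runs.
        age: Optional[int] = field(default=None, validator=v.optional(v.ge(0)))
        role: str = field(default="user", validator=v.in_({"user", "admin"}))
        # to_bool() maps "yes"/"no", "on"/"off", "1"/"0", etc. to real booleans.
        active: bool = field(default=True, converter=converters.to_bool)

    u = User(name="Ada", email="ada@example.org", age=36, active="yes")
    assert u.active is True
    # User(name="", email="ada@example.org")  # ValueError: min_len(1) fails

Because ``define`` leaves *on_setattr* at its default for mutable classes, the same converters and validators also run on later attribute assignment (for example, ``u.age = -1`` raises ``ValueError``).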
diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/LICENSE b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/LICENSE new file mode 100644 index 0000000..2bd6453 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/METADATA b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/METADATA new file mode 100644 index 0000000..60b6653 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/METADATA @@ -0,0 +1,240 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 22.1.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: 
Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.5 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS.rst +Provides-Extra: dev +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev' +Requires-Dist: hypothesis ; extra == 'dev' +Requires-Dist: pympler ; extra == 'dev' +Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' +Requires-Dist: mypy (!=0.940,>=0.900) ; extra == 'dev' +Requires-Dist: pytest-mypy-plugins ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: furo ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: sphinx-notfound-page ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests' +Requires-Dist: hypothesis ; extra == 'tests' +Requires-Dist: pympler ; extra == 'tests' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' +Requires-Dist: mypy (!=0.940,>=0.900) ; extra == 'tests' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests' +Provides-Extra: tests_no_zope +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope' +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: mypy (!=0.940,>=0.900) ; extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' + + +.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png + :alt: attrs logo + :align: center + + +``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods `_). +`Trusted by NASA `_ for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + +.. teaser-end + +For that, it gives you a class decorator and a way to declaratively define the attributes on that class: + +.. -code-begin- + +.. code-block:: pycon + + >>> from attrs import asdict, define, make_class, Factory + + >>> @define + ... class SomeClass: + ... a_number: int = 42 + ... list_of_numbers: list[int] = Factory(list) + ... + ... def hard_math(self, another_number): + ... 
return self.a_number + sum(self.list_of_numbers) * another_number + + + >>> sc = SomeClass(1, [1, 2, 3]) + >>> sc + SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + + >>> sc.hard_math(3) + 19 + >>> sc == SomeClass(1, [1, 2, 3]) + True + >>> sc != SomeClass(2, [3, 2, 1]) + True + + >>> asdict(sc) + {'a_number': 1, 'list_of_numbers': [1, 2, 3]} + + >>> SomeClass() + SomeClass(a_number=42, list_of_numbers=[]) + + >>> C = make_class("C", ["a", "b"]) + >>> C("foo", "bar") + C(a='foo', b='bar') + + +After *declaring* your attributes, ``attrs`` gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable ``__repr__``, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with ``attrs``. +Simply assign ``attrs.field()`` to the attributes instead of annotating them with types. + +---- + +This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0. +The classic APIs (``@attr.s``, ``attr.ib``, plus their serious-business aliases) and the ``attr`` package import name will remain **indefinitely**. + +Please check out `On The Core API Names `_ for a more in-depth explanation. + + +Data Classes +============ + +On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` `are a descendant `_ of ``attrs``). +In practice it does a lot more and is more flexible. +For instance it allows you to define `special handling of NumPy arrays for equality checks `_, or allows more ways to `plug into the initialization process `_. + +For more details, please refer to our `comparison page `_. + +.. -project-information- + +Project Information +=================== + +- **License**: `MIT `_ +- **PyPI**: https://pypi.org/project/attrs/ +- **Source Code**: https://github.com/python-attrs/attrs +- **Documentation**: https://www.attrs.org/ +- **Changelog**: https://www.attrs.org/en/stable/changelog.html +- **Get Help**: please use the ``python-attrs`` tag on `StackOverflow `_ +- **Third-party Extensions**: https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs +- **Supported Python Versions**: 3.5 and later (last 2.7-compatible release is `21.4.0 `_) + +If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! + + +``attrs`` for Enterprise +------------------------ + +Available as part of the Tidelift Subscription. + +The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +`Learn more. `_ + + +Release Information +=================== + +22.1.0 (2022-07-28) +------------------- + +Backwards-incompatible Changes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Python 2.7 is not supported anymore. + + Dealing with Python 2.7 tooling has become too difficult for a volunteer-run project. + + We have supported Python 2 more than 2 years after it was officially discontinued and feel that we have paid our dues. + All version up to 21.4.0 from December 2021 remain fully functional, of course. 
+ `#936 `_ +- The deprecated ``cmp`` attribute of ``attrs.Attribute`` has been removed. + This does not affect the *cmp* argument to ``attr.s`` that can be used as a shortcut to set *eq* and *order* at the same time. + `#939 `_ + + +Changes +^^^^^^^ + +- Instantiation of frozen slotted classes is now faster. + `#898 `_ +- If an ``eq`` key is defined, it is also used before hashing the attribute. + `#909 `_ +- Added ``attrs.validators.min_len()``. + `#916 `_ +- ``attrs.validators.deep_iterable()``'s *member_validator* argument now also accepts a list of validators and wraps them in an ``attrs.validators.and_()``. + `#925 `_ +- Added missing type stub re-imports for ``attrs.converters`` and ``attrs.filters``. + `#931 `_ +- Added missing stub for ``attr(s).cmp_using()``. + `#949 `_ +- ``attrs.validators._in()``'s ``ValueError`` is not missing the attribute, expected options, and the value it got anymore. + `#951 `_ +- Python 3.11 is now officially supported. + `#969 `_ + +`Full changelog `_. + +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/RECORD b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/RECORD new file mode 100644 index 0000000..6f3ac14 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/RECORD @@ -0,0 +1,58 @@ +attr/__init__.py,sha256=KZjj88xCd_tH-W59HR1EvHiYAUi4Zd1dzOxx8P77jeI,1602 +attr/__init__.pyi,sha256=t-1r-I1VnyxFrqic__QxIk1HUc3ag53L2b8lv0xDZTw,15137 +attr/__pycache__/__init__.cpython-38.pyc,, +attr/__pycache__/_cmp.cpython-38.pyc,, +attr/__pycache__/_compat.cpython-38.pyc,, +attr/__pycache__/_config.cpython-38.pyc,, +attr/__pycache__/_funcs.cpython-38.pyc,, +attr/__pycache__/_make.cpython-38.pyc,, +attr/__pycache__/_next_gen.cpython-38.pyc,, +attr/__pycache__/_version_info.cpython-38.pyc,, +attr/__pycache__/converters.cpython-38.pyc,, +attr/__pycache__/exceptions.cpython-38.pyc,, +attr/__pycache__/filters.cpython-38.pyc,, +attr/__pycache__/setters.cpython-38.pyc,, +attr/__pycache__/validators.cpython-38.pyc,, +attr/_cmp.py,sha256=Mmqj-6w71g_vx0TTLLkU4pbmv28qz_FyBGcUb1HX9ZE,4102 +attr/_cmp.pyi,sha256=cSlVvIH4As2NlDIoLglPEbSrBMWVVTpwExVDDBH6pn8,357 +attr/_compat.py,sha256=Qr9kZOu95Og7joPaQiXoPb54epKqxNU39Xu0u4QVGZI,5568 +attr/_config.py,sha256=5W8lgRePuIOWu1ZuqF1899e2CmXGc95-ipwTpF1cEU4,826 +attr/_funcs.py,sha256=XTFKZtd5zxsUvWocBw7VAswTxH-nFHk0H8gJ2JQkxD4,14645 +attr/_make.py,sha256=Srxbhb8kB17T6nP9e_dgcXY72zda9xfL5uJzva6LExY,97569 +attr/_next_gen.py,sha256=N0Gb5WdBHfcfQhcUsLAc_2ZYzl0JtAX1NOHuDgvkecE,5882 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=TWCfmCAxk8s2tgTSYnyQv9MRDPf1pk8Rj16KO_Xqe1c,3610 +attr/converters.pyi,sha256=MQo7iEzPNVoFpKqD30sVwgVpdNoIeSCF2nsXvoxLZ-Y,416 +attr/exceptions.py,sha256=ZGEMLv0CDY1TOtj49OF84myejOn-LCCXAKGIKalKkVU,1915 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=aZep54h8-4ZYV5lmZ3Dx2mqeQH4cMx6tuCmCylLNbEU,1038 
+attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400 +attr/setters.pyi,sha256=7dM10rqpQVDW0y-iJUnq8rabdO5Wx2Sbo5LwNa0IXl0,573 +attr/validators.py,sha256=cpOHMNSt02ApbTQtQAwBTMeWZqp0u_sx-e3xH-jTyR4,16793 +attr/validators.pyi,sha256=6ngbvnWnFSkbf5J2dK86pq2xpEtrwzWTKfJ4aUvSIlk,2355 +attrs-22.1.0.dist-info/AUTHORS.rst,sha256=jau5p7JMPbBJeCCpGBWsRj8zpxUVAhpyoHFJRfjM888,743 +attrs-22.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-22.1.0.dist-info/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs-22.1.0.dist-info/METADATA,sha256=vwSMK_BbEgVHrgFWpj3iW0PISTMPHzi6qham9jg7LtA,11015 +attrs-22.1.0.dist-info/RECORD,, +attrs-22.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs-22.1.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +attrs-22.1.0.dist-info/direct_url.json,sha256=teRw1NijAiOv3snAnhIsnPKXmp_ghC3u4Xmru-5haxo,252 +attrs-22.1.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=CeyxLGVViAEKKsLOLaif8vF3vs1a28vsrRVLv7eMEgM,1109 +attrs/__init__.pyi,sha256=vuFxNbulP9Q7hfpO6Lb5A-_0mbEJOiwYyefjzXMqVfs,2100 +attrs/__pycache__/__init__.cpython-38.pyc,, +attrs/__pycache__/converters.cpython-38.pyc,, +attrs/__pycache__/exceptions.cpython-38.pyc,, +attrs/__pycache__/filters.cpython-38.pyc,, +attrs/__pycache__/setters.cpython-38.pyc,, +attrs/__pycache__/validators.cpython-38.pyc,, +attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/WHEEL new file mode 120000 index 0000000..ae483c4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/d8/f4/c406bf26650a3299b3ef62b464600b48cfe7fb04159866e5797c765478 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/direct_url.json new file mode 100644 index 0000000..497c63f --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, "url": "https://files.pythonhosted.org/packages/f2/bc/d817287d1aa01878af07c19505fafd1165cd6a119e9d0821ca1d1c20312d/attrs-22.1.0-py2.py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/top_level.txt new file mode 120000 index 0000000..c240805 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs-22.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/00/66/e6/2a73ad6297642d1b03450552048c1f2f7da9010e814a8d66b7e07123b3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/__init__.py b/venv/lib/python3.8/site-packages/attrs/__init__.py new file mode 120000 index 0000000..009162e --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/09/ec/b1/2c655588010a2ac2ce2da89ff2f177becd5adbcbecad154bbfb78c1203 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/__init__.pyi b/venv/lib/python3.8/site-packages/attrs/__init__.pyi new file mode 100644 index 0000000..fc44de4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/__init__.pyi @@ -0,0 +1,66 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. +from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import define as define +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... 
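The typed ``asdict``/``astuple`` signatures above re-export the ``attrs``-namespace wrappers from ``_next_gen.py``, whose only behavioural difference from the classic ``attr`` functions is their defaults: collection types are always retained and plain ``dict``/``tuple`` factories are used. A small sketch under that assumption; the ``Point`` class is made up and not part of the diff:

.. code-block:: python

    from typing import Tuple

    import attr
    import attrs

    @attrs.define
    class Point:
        coords: Tuple[int, int] = (0, 0)

    p = Point((1, 2))
    print(attrs.asdict(p))   # {'coords': (1, 2)}  -- tuple kept
    print(attr.asdict(p))    # {'coords': [1, 2]}  -- classic default converts it to a list
    print(attrs.astuple(p))  # ((1, 2),)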
diff --git a/venv/lib/python3.8/site-packages/attrs/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/attrs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4b817cd Binary files /dev/null and b/venv/lib/python3.8/site-packages/attrs/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attrs/__pycache__/converters.cpython-38.pyc b/venv/lib/python3.8/site-packages/attrs/__pycache__/converters.cpython-38.pyc new file mode 100644 index 0000000..42c6d62 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attrs/__pycache__/converters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attrs/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/attrs/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..fa11539 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attrs/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attrs/__pycache__/filters.cpython-38.pyc b/venv/lib/python3.8/site-packages/attrs/__pycache__/filters.cpython-38.pyc new file mode 100644 index 0000000..f9740f2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attrs/__pycache__/filters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attrs/__pycache__/setters.cpython-38.pyc b/venv/lib/python3.8/site-packages/attrs/__pycache__/setters.cpython-38.pyc new file mode 100644 index 0000000..8679784 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attrs/__pycache__/setters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attrs/__pycache__/validators.cpython-38.pyc b/venv/lib/python3.8/site-packages/attrs/__pycache__/validators.cpython-38.pyc new file mode 100644 index 0000000..08c3760 Binary files /dev/null and b/venv/lib/python3.8/site-packages/attrs/__pycache__/validators.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/attrs/converters.py b/venv/lib/python3.8/site-packages/attrs/converters.py new file mode 120000 index 0000000..873dcf9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/converters.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7c/20/44/765616726237b029e9524da9cf6d8662d5f3a93929e8d78ea5d23ae0f6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/exceptions.py b/venv/lib/python3.8/site-packages/attrs/exceptions.py new file mode 120000 index 0000000..a43009f --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4a/50/e5/8ba018efb7fa9f2f87ee8cbdf0e9108ecaf0f83fecba912e12b2156241 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/filters.py b/venv/lib/python3.8/site-packages/attrs/filters.py new file mode 120000 index 0000000..1a75b11 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/filters.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/75/cf/dd/35ecb6f641fa28b535993d836a4abbb59de405fcc41b0cce983cf0d45f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/py.typed b/venv/lib/python3.8/site-packages/attrs/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/setters.py b/venv/lib/python3.8/site-packages/attrs/setters.py new file mode 
120000 index 0000000..18b258a --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/setters.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a0/ac/39/d42ef61e1e39c13c18bc31c93fd91b89cc62321311e18e46bdda4a7474 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/attrs/validators.py b/venv/lib/python3.8/site-packages/attrs/validators.py new file mode 120000 index 0000000..dfc3029 --- /dev/null +++ b/venv/lib/python3.8/site-packages/attrs/validators.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e1/a8/35/4b2543d879b73d82a2346fcd3ad47f7bb7fcd47afa1a2365e18bd7a384 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__init__.py b/venv/lib/python3.8/site-packages/cachecontrol/__init__.py new file mode 120000 index 0000000..92fcea0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/86/bc/65/bf7abbba9b1fc8cc3c937810f6f6a005ac75a581d2186a98959d199343 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3a9d3e4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/_cmd.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/_cmd.cpython-38.pyc new file mode 100644 index 0000000..6de1b15 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/_cmd.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/adapter.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/adapter.cpython-38.pyc new file mode 100644 index 0000000..dc134f0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/adapter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/cache.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/cache.cpython-38.pyc new file mode 100644 index 0000000..a05be4a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/cache.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/compat.cpython-38.pyc new file mode 100644 index 0000000..bf6bb34 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/controller.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/controller.cpython-38.pyc new file mode 100644 index 0000000..50475ed Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/controller.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/filewrapper.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/filewrapper.cpython-38.pyc new file mode 100644 index 0000000..3df0d02 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/filewrapper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/heuristics.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/heuristics.cpython-38.pyc new file mode 100644 index 0000000..92e2f50 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/heuristics.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/serialize.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/serialize.cpython-38.pyc new file mode 100644 index 0000000..e2be140 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/serialize.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/wrapper.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/wrapper.cpython-38.pyc new file mode 100644 index 0000000..6a09998 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/__pycache__/wrapper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/_cmd.py b/venv/lib/python3.8/site-packages/cachecontrol/_cmd.py new file mode 120000 index 0000000..07356ad --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/_cmd.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1e/31/49/74680f38bb2f4bfe5ef01bea62bc1e5c98f5811edd4aca820755f6e2ec \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/adapter.py b/venv/lib/python3.8/site-packages/cachecontrol/adapter.py new file mode 120000 index 0000000..44020a5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/adapter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/76/ef/02/b072adb4058bf7ea56992bf23435731eb1fea9f8923a0a3a7dc6a733ed \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/cache.py b/venv/lib/python3.8/site-packages/cachecontrol/cache.py new file mode 120000 index 0000000..21f91f6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/cache.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4e/dc/b8/e5f3a31f8d1f0a89531a4a8a42f41099b62c32993e9c2c9f2dcbf6bc6e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/caches/__init__.py b/venv/lib/python3.8/site-packages/cachecontrol/caches/__init__.py new file mode 120000 index 0000000..a358aa3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/caches/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/87/ed/5c/5263b3ea684bb234e33ab27c88f7a3a4674b0b21b89734dfb5f199bcb8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c12ac83 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/file_cache.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/file_cache.cpython-38.pyc new file mode 100644 index 0000000..2e09dcd Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/file_cache.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/redis_cache.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/redis_cache.cpython-38.pyc new file mode 100644 index 0000000..1379ad4 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/cachecontrol/caches/__pycache__/redis_cache.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachecontrol/caches/file_cache.py b/venv/lib/python3.8/site-packages/cachecontrol/caches/file_cache.py new file mode 120000 index 0000000..f8f1f99 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/caches/file_cache.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/97/b1/704dbd2e863831a6703d44dc50165a0dd72c8eac8bc591739e4f076ebe \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/caches/redis_cache.py b/venv/lib/python3.8/site-packages/cachecontrol/caches/redis_cache.py new file mode 120000 index 0000000..06edcf2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/caches/redis_cache.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6c/62/80/53420e709505994fdb941237c3acca8f52f82327c33e62cdcb38e9ff2e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/compat.py b/venv/lib/python3.8/site-packages/cachecontrol/compat.py new file mode 120000 index 0000000..9c1beff --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/24/e5/4a/c8889ba7c9cdab78c06efee75c1b0d3ad5c754428f879bbcaf6a8b195a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/controller.py b/venv/lib/python3.8/site-packages/cachecontrol/controller.py new file mode 120000 index 0000000..a6fdeb4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/controller.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ea/3c/93/e23e0b16c22f7c3792143eb128ad794423889f013636bf1483b202b70c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/filewrapper.py b/venv/lib/python3.8/site-packages/cachecontrol/filewrapper.py new file mode 120000 index 0000000..ee756c2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/filewrapper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5f/80/40/40e3b6e8634e47b9c7fdf853cc07deb9cb76ac141cc7fd79332141a5cb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/heuristics.py b/venv/lib/python3.8/site-packages/cachecontrol/heuristics.py new file mode 120000 index 0000000..e95f4be --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/heuristics.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f2/40/32/b992d20b2108810afabdb5307e1a6a83da30b3898cd0857a0d66b37af2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/serialize.py b/venv/lib/python3.8/site-packages/cachecontrol/serialize.py new file mode 120000 index 0000000..2253e82 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/serialize.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a9/b3/15/6e75120ee4119495939faf822ab5d84a16949fd7528a0b6fb40688df10 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachecontrol/wrapper.py b/venv/lib/python3.8/site-packages/cachecontrol/wrapper.py new file mode 120000 index 0000000..e85b455 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachecontrol/wrapper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5f/7f/8a/319db41e8dd5b6ac95697725a5e429173a24479344f2d6527ef295681f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/LICENSE b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/LICENSE new file mode 120000 index 0000000..5ea6000 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/79/d7/6b/bec3899d16482163a365c7f65e87a729993a1d65f960f4181e2ecb81a0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/METADATA b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/METADATA new file mode 120000 index 0000000..55be8fc --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/28/af/5d/83ece7150ca095d946b367a9b1f211bb7edf32055a4023ba562e538c6b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/RECORD b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/RECORD new file mode 100644 index 0000000..b3fa65d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/RECORD @@ -0,0 +1,53 @@ +cachy-0.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cachy-0.3.0.dist-info/LICENSE,sha256=eddrvsOJnRZIIWOjZcf2XoenKZk6HWX5YPQYHi7LgaA,1062 +cachy-0.3.0.dist-info/METADATA,sha256=KK9dg-znFQygldlGs2epsfIRu37fMgVaQCO6Vi5TjGs,1472 +cachy-0.3.0.dist-info/RECORD,, +cachy-0.3.0.dist-info/WHEEL,sha256=k_UtF7DeTKrUqc0FULNabrogAWgCy8zJZKQiwD2AkC4,89 +cachy/__init__.py,sha256=y41ImV85TJlxPUkY7wNYgAhG2VQEo5_g_03WbczZ5_E,124 +cachy/__pycache__/__init__.cpython-38.pyc,, +cachy/__pycache__/cache_manager.cpython-38.pyc,, +cachy/__pycache__/helpers.cpython-38.pyc,, +cachy/__pycache__/redis_tagged_cache.cpython-38.pyc,, +cachy/__pycache__/repository.cpython-38.pyc,, +cachy/__pycache__/tag_set.cpython-38.pyc,, +cachy/__pycache__/tagged_cache.cpython-38.pyc,, +cachy/__pycache__/utils.cpython-38.pyc,, +cachy/cache_manager.py,sha256=-eL3UqPRy6uTGqyYzZmqGkYt8PrxhIx9ziu8uZIWIWQ,7339 +cachy/contracts/__init__.py,sha256=O9CT1B2F-cVB8elT0EoCJbgkcffjvlmqteqavs4giDg,25 +cachy/contracts/__pycache__/__init__.cpython-38.pyc,, +cachy/contracts/__pycache__/factory.cpython-38.pyc,, +cachy/contracts/__pycache__/repository.cpython-38.pyc,, +cachy/contracts/__pycache__/store.cpython-38.pyc,, +cachy/contracts/__pycache__/taggable_store.cpython-38.pyc,, +cachy/contracts/factory.py,sha256=F6USJ2ooGjTOFNS8gqmKxg8LHK3SZ2RgcbBxQI1QYsE,323 +cachy/contracts/repository.py,sha256=-3eqnXR8j5MiRIPlOYpec1X9DuIYNFbf_pn86XO8oDM,2942 +cachy/contracts/store.py,sha256=oxQWQEwiHIDOQj1b7CGnztrQdeFFHG5iAEVluZ-bAh8,2594 +cachy/contracts/taggable_store.py,sha256=ObQILbeu_CLBKV17r1dn3IJWjoPi4WiNboFKW7JW6RA,497 +cachy/helpers.py,sha256=pDSGH7MD2ARvumlPl6KhycFHIP_QQk5KsWRA6Vfs08U,101 +cachy/redis_tagged_cache.py,sha256=FZ8mmAd0y-QmXc7DCA0W-oJkUT7HgX-xX82QOosJV-E,2171 +cachy/repository.py,sha256=ZZHdMXeUrxvT2PJSmL9hTqeUr7fUkq527UvrZ3aQMyg,8250 +cachy/serializers/__init__.py,sha256=RuQDghzeLKWxWs8YKju6dOp75AsTcookQqOjYj9nfjs,202 +cachy/serializers/__pycache__/__init__.cpython-38.pyc,, +cachy/serializers/__pycache__/json_serializer.cpython-38.pyc,, +cachy/serializers/__pycache__/msgpack_serializer.cpython-38.pyc,, +cachy/serializers/__pycache__/pickle_serializer.cpython-38.pyc,, +cachy/serializers/__pycache__/serializer.cpython-38.pyc,, +cachy/serializers/json_serializer.py,sha256=UNjSMa0auB9cVI-Ru6NB5dtvXKqSQ1Ne66fllOLwxT0,683 
+cachy/serializers/msgpack_serializer.py,sha256=UX31MOUri5KwgECKOczoTaWAeJfvVeVQujKXz560xyI,814 +cachy/serializers/pickle_serializer.py,sha256=Hc-OOLQnLfOCvYGdRO3umd2z38MNjz7IUBPLI0Ksecc,848 +cachy/serializers/serializer.py,sha256=d_1G10DNG0Myq4tO43T1RmESyZjRRiHubhKsI21D4no,513 +cachy/stores/__init__.py,sha256=9kcnXzYQl9SU9EOYfMeTZH5bT7H1y2mPZkHYuIahZDk,207 +cachy/stores/__pycache__/__init__.cpython-38.pyc,, +cachy/stores/__pycache__/dict_store.cpython-38.pyc,, +cachy/stores/__pycache__/file_store.cpython-38.pyc,, +cachy/stores/__pycache__/memcached_store.cpython-38.pyc,, +cachy/stores/__pycache__/null_store.cpython-38.pyc,, +cachy/stores/__pycache__/redis_store.cpython-38.pyc,, +cachy/stores/dict_store.py,sha256=Y8QN_yg3yzq6QxaX24Sg62lRLt0qODTV-XdVuosaNpw,3700 +cachy/stores/file_store.py,sha256=5RakbtGquztkcPBtYGg7GQ64R0lG9638zFgwbZ2TcvQ,5806 +cachy/stores/memcached_store.py,sha256=hZ1bMNDqj49eRKN3rCpO80j4XVfm1jbddyFRWIXGXRA,2984 +cachy/stores/null_store.py,sha256=2kYMfN4G9TlrAH7YMx60TT3BmNwfEA2z1AezMJcL_gk,2026 +cachy/stores/redis_store.py,sha256=0qHMDEDXxo0BIwPa1kjrSCJaeFTTipabo4yQTTSzivs,3318 +cachy/tag_set.py,sha256=R7kJUapYsd5zsnQ4MoWL9E9XgCuhUGdHXwcccpJEtwM,1665 +cachy/tagged_cache.py,sha256=A7yl7jVdMyITIM8P3aL-X_OAuEmHQP8XPz2jNMdtbzM,5835 +cachy/utils.py,sha256=w2JpCwjLDE-GpdUyuRnkQA4hXyj-cml9etZ4ReuSQGQ,1309 diff --git a/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/WHEEL new file mode 120000 index 0000000..3817540 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy-0.3.0.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/93/f5/2d/17b0de4caad4a9cd0550b35a6eba20016802cbccc964a422c03d80902e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/__init__.py b/venv/lib/python3.8/site-packages/cachy/__init__.py new file mode 120000 index 0000000..64358bd --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cb/8d/48/995f394c99713d4918ef0358800846d95404a39fe0ff4dd66dccd9e7f1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4dafefe Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/cache_manager.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/cache_manager.cpython-38.pyc new file mode 100644 index 0000000..5189d6b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/cache_manager.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/helpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/helpers.cpython-38.pyc new file mode 100644 index 0000000..44494d5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/helpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/redis_tagged_cache.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/redis_tagged_cache.cpython-38.pyc new file mode 100644 index 0000000..eeec49a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/redis_tagged_cache.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/repository.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/cachy/__pycache__/repository.cpython-38.pyc new file mode 100644 index 0000000..c8afca3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/repository.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/tag_set.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/tag_set.cpython-38.pyc new file mode 100644 index 0000000..8391524 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/tag_set.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/tagged_cache.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/tagged_cache.cpython-38.pyc new file mode 100644 index 0000000..acac125 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/tagged_cache.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..3d5de10 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/cache_manager.py b/venv/lib/python3.8/site-packages/cachy/cache_manager.py new file mode 120000 index 0000000..9793e27 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/cache_manager.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f9/e2/f7/52a3d1cbab931aac98cd99aa1a462df0faf1848c7dce2bbcb992162164 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/__init__.py b/venv/lib/python3.8/site-packages/cachy/contracts/__init__.py new file mode 120000 index 0000000..78d3bb4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/contracts/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3b/d0/93/d41d85f9c541f1e953d04a0225b82471f7e3be59aab5ea9abece208838 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..ac87822 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/factory.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/factory.cpython-38.pyc new file mode 100644 index 0000000..405396b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/factory.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/repository.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/repository.cpython-38.pyc new file mode 100644 index 0000000..72b4c97 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/repository.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/store.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/store.cpython-38.pyc new file mode 100644 index 0000000..8f19188 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/taggable_store.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/taggable_store.cpython-38.pyc new file mode 100644 index 0000000..9eedd9a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/contracts/__pycache__/taggable_store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/factory.py b/venv/lib/python3.8/site-packages/cachy/contracts/factory.py new file mode 120000 index 0000000..6def921 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/contracts/factory.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/17/a5/12/276a281a34ce14d4bc82a98ac60f0b1cadd267646071b071408d5062c1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/repository.py b/venv/lib/python3.8/site-packages/cachy/contracts/repository.py new file mode 120000 index 0000000..9e19ec9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/contracts/repository.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fb/77/aa/9d747c8f93224483e5398a5e7355fd0ee2183456dffe99fce973bca033 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/store.py b/venv/lib/python3.8/site-packages/cachy/contracts/store.py new file mode 120000 index 0000000..37700f4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/contracts/store.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a3/14/16/404c221c80ce423d5bec21a7cedad075e1451c6e62004565b99f9b021f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/contracts/taggable_store.py b/venv/lib/python3.8/site-packages/cachy/contracts/taggable_store.py new file mode 120000 index 0000000..6455631 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/contracts/taggable_store.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/39/b4/08/2db7aefc22c1295d7baf5767dc82568e83e2e1688d6e814a5bb256e910 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/helpers.py b/venv/lib/python3.8/site-packages/cachy/helpers.py new file mode 120000 index 0000000..3c52015 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/helpers.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a4/34/86/1fb303d8046fba694f97a2a1c9c14720ffd0424e4ab16440e957ecd3c5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/redis_tagged_cache.py b/venv/lib/python3.8/site-packages/cachy/redis_tagged_cache.py new file mode 120000 index 0000000..2d3dbed --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/redis_tagged_cache.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/15/9f/26/980774cbe4265dcec3080d16fa8264513ec7817fb15fcd903a8b0957e1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/repository.py b/venv/lib/python3.8/site-packages/cachy/repository.py new file mode 120000 index 0000000..f4ee20b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/repository.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/65/91/dd/317794af1bd3d8f25298bf614ea794afb7d492ae76ed4beb6776903328 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/__init__.py b/venv/lib/python3.8/site-packages/cachy/serializers/__init__.py new file mode 120000 index 0000000..961f7cb --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/serializers/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/46/e4/03/821cde2ca5b15acf182a3bba74ea7be40b13728a2442a3a3623f677e3b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/__init__.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..dd61976 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/json_serializer.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/json_serializer.cpython-38.pyc new file mode 100644 index 0000000..b8dafb3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/json_serializer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/msgpack_serializer.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/msgpack_serializer.cpython-38.pyc new file mode 100644 index 0000000..74bb5e9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/msgpack_serializer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/pickle_serializer.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/pickle_serializer.cpython-38.pyc new file mode 100644 index 0000000..6c799ff Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/pickle_serializer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/serializer.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/serializer.cpython-38.pyc new file mode 100644 index 0000000..e0eee82 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/serializers/__pycache__/serializer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/json_serializer.py b/venv/lib/python3.8/site-packages/cachy/serializers/json_serializer.py new file mode 120000 index 0000000..f8d32a9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/serializers/json_serializer.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/50/d8/d2/31ad1ab81f5c548f91bba341e5db6f5caa9243535eeba7e594e2f0c53d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/msgpack_serializer.py b/venv/lib/python3.8/site-packages/cachy/serializers/msgpack_serializer.py new file mode 120000 index 0000000..ce19843 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/serializers/msgpack_serializer.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/51/7d/f5/30e52b8b92b080408a39cce84da5807897ef55e550ba3297cf9eb4c722 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/pickle_serializer.py b/venv/lib/python3.8/site-packages/cachy/serializers/pickle_serializer.py new file mode 120000 index 0000000..e0c045a --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/serializers/pickle_serializer.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1d/cf/8e/38b4272df382bd819d44edee99ddb3dfc30d8f3ec85013cb2342ac79c7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/serializers/serializer.py b/venv/lib/python3.8/site-packages/cachy/serializers/serializer.py new file mode 120000 index 0000000..019c7da --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/serializers/serializer.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/77/fd/46/d740cd1b4332ab8b4ee374f5466112c998d14621ee6e12ac236d43e27a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__init__.py 
b/venv/lib/python3.8/site-packages/cachy/stores/__init__.py new file mode 120000 index 0000000..669b15d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/stores/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f6/47/27/5f361097d494f443987cc793647e5b4fb1f5cb698f6641d8b886a16439 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..0898e1d Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/dict_store.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/dict_store.cpython-38.pyc new file mode 100644 index 0000000..e0d42e3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/dict_store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/file_store.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/file_store.cpython-38.pyc new file mode 100644 index 0000000..1320629 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/file_store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/memcached_store.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/memcached_store.cpython-38.pyc new file mode 100644 index 0000000..6db9e55 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/memcached_store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/null_store.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/null_store.cpython-38.pyc new file mode 100644 index 0000000..0046b04 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/null_store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/redis_store.cpython-38.pyc b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/redis_store.cpython-38.pyc new file mode 100644 index 0000000..6df63da Binary files /dev/null and b/venv/lib/python3.8/site-packages/cachy/stores/__pycache__/redis_store.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cachy/stores/dict_store.py b/venv/lib/python3.8/site-packages/cachy/stores/dict_store.py new file mode 120000 index 0000000..3a04837 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/stores/dict_store.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/63/c4/0d/ff2837cb3aba431697db84a0eb69512edd2a3834d5f97755ba8b1a369c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/stores/file_store.py b/venv/lib/python3.8/site-packages/cachy/stores/file_store.py new file mode 120000 index 0000000..dad67a2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/stores/file_store.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e5/16/a4/6ed1aabb3b6470f06d60683b190eb8474946f7adfccc58306d9d9372f4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/stores/memcached_store.py b/venv/lib/python3.8/site-packages/cachy/stores/memcached_store.py new file mode 120000 index 0000000..8cb18bf --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/stores/memcached_store.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/85/9d/5b/30d0ea8f8f5e44a377ac2a4ef348f85d57e6d636dd7721515885c65d10 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/stores/null_store.py b/venv/lib/python3.8/site-packages/cachy/stores/null_store.py new file mode 120000 index 0000000..338e0e4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/stores/null_store.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/da/46/0c/7cde06f5396b007ed8331eb44d3dc198dc1f100db3d407b330970bfe09 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/stores/redis_store.py b/venv/lib/python3.8/site-packages/cachy/stores/redis_store.py new file mode 120000 index 0000000..793346b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/stores/redis_store.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d2/a1/cc/0c40d7c68d012303dad648eb48225a7854d38a969ba38c904d34b38afb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/tag_set.py b/venv/lib/python3.8/site-packages/cachy/tag_set.py new file mode 120000 index 0000000..2df4d61 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/tag_set.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/47/b9/09/51aa58b1de73b2743832858bf44f57802ba15067475f071c729244b703 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/tagged_cache.py b/venv/lib/python3.8/site-packages/cachy/tagged_cache.py new file mode 120000 index 0000000..a84950e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/tagged_cache.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/03/bc/a5/ee355d33221320cf0fdda2fe5ff380b8498740ff173f3da334c76d6f33 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cachy/utils.py b/venv/lib/python3.8/site-packages/cachy/utils.py new file mode 120000 index 0000000..2405b60 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cachy/utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c3/62/69/0b08cb0c4f86a5d532b919e4400e215f28fe72697d7ad67845eb924064 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/LICENSE b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/LICENSE new file mode 100644 index 0000000..0a64774 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. 
If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/METADATA b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/METADATA new file mode 100644 index 0000000..92ea6c8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/METADATA @@ -0,0 +1,83 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2022.9.24 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
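The usage notes above reduce to a single call, certifi.where(), which returns the path of the bundled cacert.pem. A minimal sketch of wiring that bundle into standard-library TLS verification (assumes only the vendored certifi 2022.9.24 and the stdlib ssl module; nothing else from this repo)::

    import ssl
    import certifi

    # Build an SSL context that trusts Mozilla's CA bundle shipped with certifi.
    context = ssl.create_default_context(cafile=certifi.where())
    print(certifi.where())  # path to the installed cacert.pem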
+ + diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/RECORD b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/RECORD new file mode 100644 index 0000000..59bd1df --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/RECORD @@ -0,0 +1,16 @@ +certifi-2022.9.24.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2022.9.24.dist-info/LICENSE,sha256=oC9sY4-fuE0G93ZMOrCF2K9-2luTwWbaVDEkeQd8b7A,1052 +certifi-2022.9.24.dist-info/METADATA,sha256=33NAOmkqKTCb2u1Ys8Zth7ABWXfEuLgp-5gLp1yK_7A,2911 +certifi-2022.9.24.dist-info/RECORD,, +certifi-2022.9.24.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +certifi-2022.9.24.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +certifi-2022.9.24.dist-info/direct_url.json,sha256=p5X2U1jXaPZKTlEQtU94glepM6F3cTiPCTAFOGUhSzc,253 +certifi-2022.9.24.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=luDjIGxDSrQ9O0zthdz5Lnt069Z_7eR1GIEefEaf-Ys,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-38.pyc,, +certifi/__pycache__/__main__.cpython-38.pyc,, +certifi/__pycache__/core.cpython-38.pyc,, +certifi/cacert.pem,sha256=3l8CcWt_qL42030rGieD3SLufICFX0bYtGhDl_EXVPI,286370 +certifi/core.py,sha256=lhewz0zFb2b4ULsQurElmloYwQoecjWzPqY67P8T7iM,4219 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/WHEEL b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/WHEEL new file mode 120000 index 0000000..e14c71b --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7b/0c/04/b9e8a8d42d977874ef4f5ee7f1d6542603afc82582b7459534b0a53fda \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/direct_url.json new file mode 100644 index 0000000..d1ee192 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, "url": "https://files.pythonhosted.org/packages/1d/38/fa96a426e0c0e68aabc68e896584b83ad1eec779265a028e156ce509630e/certifi-2022.9.24-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/top_level.txt new file mode 120000 index 0000000..78b421f --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi-2022.9.24.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/28/cb/b8/bd409fb232eb90f6d235d81d7a44bea552730402453bffe723c345ebe5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/certifi/__init__.py b/venv/lib/python3.8/site-packages/certifi/__init__.py new file mode 100644 index 0000000..af4bcc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2022.09.24" diff --git 
a/venv/lib/python3.8/site-packages/certifi/__main__.py b/venv/lib/python3.8/site-packages/certifi/__main__.py new file mode 120000 index 0000000..1142611 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi/__main__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c4/10/68/8fdd394d45812d118034e71fee88ba7beddd30fe1c1281bd3b232cd758 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/certifi/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/certifi/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..80f19d2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/certifi/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/certifi/__pycache__/__main__.cpython-38.pyc b/venv/lib/python3.8/site-packages/certifi/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..d5cdd52 Binary files /dev/null and b/venv/lib/python3.8/site-packages/certifi/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/certifi/__pycache__/core.cpython-38.pyc b/venv/lib/python3.8/site-packages/certifi/__pycache__/core.cpython-38.pyc new file mode 100644 index 0000000..b5206c1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/certifi/__pycache__/core.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/certifi/cacert.pem b/venv/lib/python3.8/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..4005155 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi/cacert.pem @@ -0,0 +1,4708 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP 
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# 
Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD 
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy 
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC 
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: 
CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv 
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym 
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl 
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc 
OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 
55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z 
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze 
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 
Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT 
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. 
OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# 
Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi 
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB 
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq 
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 
58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA RSA v3" +# Serial: 75951268308633135324246244059508261641472512052 +# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 +# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 +# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 +-----BEGIN CERTIFICATE----- +MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL +BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt +VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw +JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw +OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG +QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 +Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD +QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 +7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx +uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 +7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ +rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL +l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG +wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 +znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO +M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK +5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH +nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo +DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy +tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL +BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ +6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 +Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ +3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk +vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 +9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ +mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA +VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF +9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM +moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 +bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. 
OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA ECC v3" +# Serial: 218504919822255052842371958738296604628416471745 +# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 +# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 +# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 +-----BEGIN CERTIFICATE----- +MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw +gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn +cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD +VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 +NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r +YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh +IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF +Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ +KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK +fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB +Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C +MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp +ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 +7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx +vmjkI6TZraE3 +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke 
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- diff --git a/venv/lib/python3.8/site-packages/certifi/core.py b/venv/lib/python3.8/site-packages/certifi/core.py new file mode 100644 index 0000000..de02898 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi/core.py @@ -0,0 +1,108 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/lib/python3.8/site-packages/certifi/py.typed b/venv/lib/python3.8/site-packages/certifi/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/certifi/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/LICENSE new file mode 120000 index 0000000..d7ad043 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/04/b8/0f/5b077bbed68808cfebadeb5e3523f2a8c9a96495c587bd96df1eac2a33 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/METADATA new file mode 100644 index 0000000..538e679 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/METADATA @@ -0,0 +1,34 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.15.1 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: License :: OSI Approved :: MIT License +License-File: LICENSE +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. 
+ +Contact +------- + +`Mailing list `_ diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/RECORD new file mode 100644 index 0000000..9ccedf7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/RECORD @@ -0,0 +1,44 @@ +_cffi_backend.cpython-38-x86_64-linux-gnu.so,sha256=Vexo81KP_uf8fqK4h5zftvakKuYQ_nFkS0sSlsOXee8,994936 +cffi-1.15.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi-1.15.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 +cffi-1.15.1.dist-info/METADATA,sha256=KP4G3WmavRgDGwD2b8Y_eDsM1YeV6ckcG6Alz3-D8VY,1144 +cffi-1.15.1.dist-info/RECORD,, +cffi-1.15.1.dist-info/WHEEL,sha256=-ijGDuALlPxm3HbhKntps0QzHsi-DPlXqgerYTTJkFE,148 +cffi-1.15.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 +cffi-1.15.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__init__.py,sha256=6xB_tafGvhhM5Xvj0Ova3oPC2SEhVlLTEObVLnazeiM,513 +cffi/__pycache__/__init__.cpython-38.pyc,, +cffi/__pycache__/api.cpython-38.pyc,, +cffi/__pycache__/backend_ctypes.cpython-38.pyc,, +cffi/__pycache__/cffi_opcode.cpython-38.pyc,, +cffi/__pycache__/commontypes.cpython-38.pyc,, +cffi/__pycache__/cparser.cpython-38.pyc,, +cffi/__pycache__/error.cpython-38.pyc,, +cffi/__pycache__/ffiplatform.cpython-38.pyc,, +cffi/__pycache__/lock.cpython-38.pyc,, +cffi/__pycache__/model.cpython-38.pyc,, +cffi/__pycache__/pkgconfig.cpython-38.pyc,, +cffi/__pycache__/recompiler.cpython-38.pyc,, +cffi/__pycache__/setuptools_ext.cpython-38.pyc,, +cffi/__pycache__/vengine_cpy.cpython-38.pyc,, +cffi/__pycache__/vengine_gen.cpython-38.pyc,, +cffi/__pycache__/verifier.cpython-38.pyc,, +cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 +cffi/_cffi_include.h,sha256=tKnA1rdSoPHp23FnDL1mDGwFo-Uj6fXfA6vA6kcoEUc,14800 +cffi/_embedding.h,sha256=9tnjF44QRobR8z0FGqAmAZY-wMSBOae1SUPqHccowqc,17680 +cffi/api.py,sha256=yxJalIePbr1mz_WxAHokSwyP5CVYde44m-nolHnbJNo,42064 +cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 +cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724 +cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689 +cffi/cparser.py,sha256=rO_1pELRw1gI1DE1m4gi2ik5JMfpxouAACLXpRPlVEA,44231 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/ffiplatform.py,sha256=HMXqR8ks2wtdsNxGaWpQ_PyqIvtiuos_vf1qKCy-cwg,4046 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=_GH_UF1Rn9vC4AvmgJm6qj7RUXXG3eqKPc8bPxxyBKE,21768 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/recompiler.py,sha256=YgVYTh2CrXIobo-vMk7_K9mwAXdd_LqB4-IbYABQ488,64598 +cffi/setuptools_ext.py,sha256=RUR17N5f8gpiQBBlXL34P9FtOu1mhHIaAf3WJlg5S4I,8931 +cffi/vengine_cpy.py,sha256=YglN8YS-UaHEv2k2cxgotNWE87dHX20-68EyKoiKUYA,43320 +cffi/vengine_gen.py,sha256=5dX7s1DU6pTBOMI6oTVn_8Bnmru_lj932B6b4v29Hlg,26684 +cffi/verifier.py,sha256=ESwuXWXtXrKEagCKveLRDjFzLNCyaKdqAgAlKREcyhY,11253 diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/WHEEL new file mode 120000 index 0000000..8b6429d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/WHEEL @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/fa/28/c6/0ee00b94fc66dc76e12a7b69b344331ec8be0cf957aa07ab6134c99051 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/entry_points.txt new file mode 100644 index 0000000..4b0274f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/top_level.txt new file mode 120000 index 0000000..5d56cc3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi-1.15.1.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ac/4e/d6/477ad97cd2b1588f7e8e7ea1b0708097b303901f859ae41bc568c57a14 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/__init__.py b/venv/lib/python3.8/site-packages/cffi/__init__.py new file mode 100644 index 0000000..90e2e65 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.15.1" +__version_info__ = (1, 15, 1) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..417231e Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/api.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/api.cpython-38.pyc new file mode 100644 index 0000000..ea444e6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/backend_ctypes.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/backend_ctypes.cpython-38.pyc new file mode 100644 index 0000000..674e2e0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/backend_ctypes.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/cffi_opcode.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/cffi_opcode.cpython-38.pyc new file mode 100644 index 0000000..4e7e7f9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/cffi_opcode.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/commontypes.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/commontypes.cpython-38.pyc new file mode 100644 index 0000000..15d4889 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/commontypes.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/cparser.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/cparser.cpython-38.pyc new file mode 100644 index 0000000..cf0f688 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/cffi/__pycache__/cparser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/error.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/error.cpython-38.pyc new file mode 100644 index 0000000..21205a5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/error.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/ffiplatform.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/ffiplatform.cpython-38.pyc new file mode 100644 index 0000000..be99a51 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/ffiplatform.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/lock.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/lock.cpython-38.pyc new file mode 100644 index 0000000..cf6dc79 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/lock.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/model.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/model.cpython-38.pyc new file mode 100644 index 0000000..6e2abb6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/model.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/pkgconfig.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/pkgconfig.cpython-38.pyc new file mode 100644 index 0000000..23a8f13 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/pkgconfig.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/recompiler.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/recompiler.cpython-38.pyc new file mode 100644 index 0000000..d41cc13 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/recompiler.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/setuptools_ext.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/setuptools_ext.cpython-38.pyc new file mode 100644 index 0000000..47f13bb Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/setuptools_ext.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/vengine_cpy.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/vengine_cpy.cpython-38.pyc new file mode 100644 index 0000000..67f2a95 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/vengine_cpy.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/vengine_gen.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/vengine_gen.cpython-38.pyc new file mode 100644 index 0000000..2519621 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/vengine_gen.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/__pycache__/verifier.cpython-38.pyc b/venv/lib/python3.8/site-packages/cffi/__pycache__/verifier.cpython-38.pyc new file mode 100644 index 0000000..148f544 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cffi/__pycache__/verifier.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cffi/_cffi_errors.h b/venv/lib/python3.8/site-packages/cffi/_cffi_errors.h new file mode 120000 index 0000000..56f0bcf --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/_cffi_errors.h @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/cd/05/ed/eee47f9bc8145be7c8da1260d0aa129091705eff111949040d9d7bedd4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/_cffi_include.h b/venv/lib/python3.8/site-packages/cffi/_cffi_include.h new file mode 120000 index 0000000..b6c53a6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/_cffi_include.h @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b4/a9/c0/d6b752a0f1e9db71670cbd660c6c05a3e523e9f5df03abc0ea47281147 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/_embedding.h b/venv/lib/python3.8/site-packages/cffi/_embedding.h new file mode 100644 index 0000000..8e8df88 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/_embedding.h @@ -0,0 +1,528 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. If _cffi_start_python() fails, then this is set + to NULL; otherwise, it should never be NULL. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.15.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +# else + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = -42; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value == locked_value); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. 
+ */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
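+
+   For example (an illustrative sketch only; the surrounding error handling
+   is hypothetical), embedding C code could call it explicitly before using
+   any ``extern "Python"`` function:
+
+       if (cffi_start_python() != 0) {
+           fprintf(stderr, "embedded Python failed to initialize\n");
+           return -1;
+       }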
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/venv/lib/python3.8/site-packages/cffi/api.py b/venv/lib/python3.8/site-packages/cffi/api.py new file mode 120000 index 0000000..a8018fe --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/api.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cb/12/5a/94878f6ebd66cff5b1007a244b0c8fe4255875ee389be9e89479db24da \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/backend_ctypes.py b/venv/lib/python3.8/site-packages/cffi/backend_ctypes.py new file mode 120000 index 0000000..b8261ad --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/backend_ctypes.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/87/96/48/ccb73a0455579c6c9cf713ea656512eea1b2ef21520ea5deebc49af33e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/cffi_opcode.py b/venv/lib/python3.8/site-packages/cffi/cffi_opcode.py new file mode 120000 index 0000000..520a84b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/cffi_opcode.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bf/d4/5d/0ffa2f03cac2b6ab02f7922f922e55ebaeeb00e84682cddf6f6abdc693 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/commontypes.py b/venv/lib/python3.8/site-packages/cffi/commontypes.py new file mode 120000 index 0000000..3d94af4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/commontypes.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/41/2e/2e/c420c8ec986d4f28e1d61967080fa0ca799ab335b125a4512c69096b50 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/cparser.py b/venv/lib/python3.8/site-packages/cffi/cparser.py new file mode 120000 index 0000000..4ed09b8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/cparser.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ac/ef/f5/a442d1c35808d431359b8822da293924c7e9c68b800022d7a513e55440 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/error.py b/venv/lib/python3.8/site-packages/cffi/error.py new file mode 120000 index 0000000..c7e23c5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/error.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bf/ac/53/892e14d24bc3732e21fc10d1a39bf7f5942e8fe20c4582efe444dd759b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/ffiplatform.py b/venv/lib/python3.8/site-packages/cffi/ffiplatform.py new file mode 120000 index 0000000..5f522b8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/ffiplatform.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1c/c5/ea/47c92cdb0b5db0dc46696a50fcfcaa22fb62ba8b3fbdfd6a282cbe7308 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/lock.py b/venv/lib/python3.8/site-packages/cffi/lock.py new file mode 120000 index 0000000..63f805d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/lock.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/97/d4/d3/7703083298ba8c39091a742013d72f4c847b0809ed209afc1061edde96 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/model.py b/venv/lib/python3.8/site-packages/cffi/model.py new file mode 120000 index 0000000..6496071 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/model.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/fc/61/ff/505d519fdbc2e00be68099baaa3ed15175c6ddea8a3dcf1b3f1c7204a1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/parse_c_type.h b/venv/lib/python3.8/site-packages/cffi/parse_c_type.h new file mode 120000 index 0000000..c4fdda0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/parse_c_type.h @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/39/dc/10/7f033d92dababe5081e377b11509b10c1b63d8c04d74af0b625d79b63c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/pkgconfig.py b/venv/lib/python3.8/site-packages/cffi/pkgconfig.py new file mode 120000 index 0000000..d6f4fae --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/pkgconfig.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2c/fd/70/eef996be62b0caa2da535676e3714e58635032b80c519aef805b8e95ca \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/recompiler.py b/venv/lib/python3.8/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..5d9d32d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/recompiler.py @@ -0,0 +1,1581 @@ +import os, sys, io +from . import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + 
format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. 
+ # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. + expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we 
give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + 
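+        # Illustrative sketch, assuming a hypothetical module name "_example"
+        # and version 0x2601: on CPython 3 the code below emits roughly
+        #     PyMODINIT_FUNC
+        #     PyInit__example(void)
+        #     {
+        #       return _cffi_init("_example", 0x2601, &_cffi_type_context);
+        #     }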
prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. 
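+
+    # Minimal usage sketch (hypothetical module and function names): the
+    # Recompiler is normally driven through cffi's public build API rather
+    # than instantiated directly, e.g.
+    #
+    #     from cffi import FFI
+    #     ffibuilder = FFI()
+    #     ffibuilder.cdef("int add(int, int);")
+    #     ffibuilder.set_source("_example",
+    #         "static int add(int a, int b) { return a + b; }")
+    #     ffibuilder.compile(verbose=True)   # API mode -> write_c_source_to_f()
+    #
+    # or, in ABI mode (set_source("_example", None)),
+    # ffibuilder.emit_python_code("_example.py") goes through
+    # write_py_source_to_f() below.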
+ + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + 
localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + 
prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
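+        # (Illustrative, for a hypothetical cdef'd "struct pt f(struct pt);":
+        # the branch below would then emit roughly
+        #     static void _cffi_f_f(struct pt *result, struct pt *x0)
+        #     {
+        #       { *result = f(*x0); }
+        #     }
+        # instead of a CPython wrapper taking PyObject * arguments.)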
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some 
struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
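+    # (Illustrative: entries in this category come from "static const ..."
+    # declarations in cdef(), e.g. something like "static const int FOO;",
+    # whose value is then read from the real C declaration at import time;
+    # integer "#define FOO ..." macros are handled separately further down.)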
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... 
+ +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/venv/lib/python3.8/site-packages/cffi/setuptools_ext.py b/venv/lib/python3.8/site-packages/cffi/setuptools_ext.py new file mode 120000 index 0000000..47acf7f --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/cffi/setuptools_ext.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/45/44/75/ecde5ff20a624010655cbdf83fd16d3aed6684721a01fdd62658394b82 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/vengine_cpy.py b/venv/lib/python3.8/site-packages/cffi/vengine_cpy.py new file mode 120000 index 0000000..63f3395 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/vengine_cpy.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/62/09/4d/f184be51a1c4bf6936731828b4d584f3b7475f6d3eebc1322a888a5180 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/vengine_gen.py b/venv/lib/python3.8/site-packages/cffi/vengine_gen.py new file mode 120000 index 0000000..602378a --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/vengine_gen.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e5/d5/fb/b350d4ea94c138c23aa13567ffc0679abbbf963f77d81e9be2fdbd1e58 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cffi/verifier.py b/venv/lib/python3.8/site-packages/cffi/verifier.py new file mode 120000 index 0000000..73a8775 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cffi/verifier.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/11/2c/2e/5d65ed5eb2846a008abde2d10e31732cd0b268a76a02002529111cca16 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/LICENSE new file mode 120000 index 0000000..e071049 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/eb/31/a0/c5a4fb09b8a4e32055d25c1e5f9c358a2752fef3cd720213d1ccfee241 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/METADATA new file mode 100644 index 0000000..c6ebd7d --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/METADATA @@ -0,0 +1,269 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 2.1.1 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
+Home-page: https://github.com/ousret/charset_normalizer +Author: Ahmed TAHRI @Ousret +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,i18n,txt,text,charset,charset-detector,normalization,unicode,chardet +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.6.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport +Requires-Dist: unicodedata2 ; extra == 'unicode_backport' + + +

    Charset Detection, for Everyone 👋


    + The Real First Universal Charset Detector

    + +> A library that helps you read text from an unknown charset encoding.
    Motivated by `chardet`, +> I'm trying to resolve the issue by taking a new approach. +> All IANA character set names for which the Python core library provides codecs are supported. + +
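For a concrete feel of the API described in the blockquote above, here is a minimal sketch based on the package's own module docstring; the sample string and the printed values are illustrative only, not output captured from a real run:

```python
from charset_normalizer import from_bytes

# A short UTF-8 payload whose encoding we pretend not to know (sample text is illustrative).
payload = "Bсеки човек има право на образование.".encode("utf_8")

results = from_bytes(payload)   # returns a CharsetMatches collection
best_guess = results.best()     # best CharsetMatch, or None if nothing fits

if best_guess is not None:
    print(best_guess.encoding)  # e.g. 'utf_8'
    print(str(best_guess))      # the decoded, readable text
```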

    + >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< +

+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size` | 193.6 kB | 39.5 kB | ~200 kB |
+| `Supported Encoding` | 33 | :tada: [93](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+

+*(screenshot placeholders: "Reading Normalized Text", "Cat Reading Text")*
+
+*\*\* : They clearly use code specific to each encoding, even if it covers most of the encodings in common use*
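Since the comparison table above positions this package as a Chardet alternative, here is a hedged sketch of the drop-in `detect` compatibility layer; the byte sample is made up and the printed guesses are only what one would typically expect, while the result keys follow chardet's dict convention:

```python
from charset_normalizer import detect

# chardet-compatible helper: returns a plain dict instead of a CharsetMatches collection.
result = detect("Bonjour tout le monde, voici un texte accentué : éàü.".encode("cp1252"))

print(result["encoding"])    # a guessed codec name, e.g. 'cp1252' or a close sibling
print(result["language"])    # detected language, possibly an empty string
print(result["confidence"])  # a float between 0.0 and 1.0, or None
```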
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⭐ Your support
+
+*Fork it, test it, star it, submit your ideas! We do listen.*
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **39 ms** | 26 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
+| charset-normalizer | 400 ms | 200 ms | 15 ms |
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files using default parameters. For more details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
+> Keep in mind that the stats are generous and that Chardet's accuracy versus ours is measured using Chardet's initial capability
+> (e.g. supported encodings). Challenge them if you want.
+
+[cchardet](https://github.com/PyYoshi/cChardet) is a non-native (cpp binding), unmaintained, faster alternative with
+better accuracy than chardet but lower than this package. If speed is the most important factor, you should try it.
+
+## ✨ Installation
+
+Using PyPI for the latest stable release:
+```sh
+pip install charset-normalizer -U
+```
+
+If you want a more up-to-date `unicodedata` than the one available in your Python setup:
+```sh
+pip install charset-normalizer[unicode_backport] -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+                  file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+  files                 File(s) to be analysed
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -v, --verbose         Display complementary information about file if any.
+                        Stdout will contain logs about the detection process.
+  -a, --with-alternative
+                        Output complementary possibilities if any. Top-level
+                        JSON WILL be a list.
+  -n, --normalize       Permit to normalize input file. If not set, program
+                        does not write anything.
+  -m, --minimal         Only output the charset detected to STDOUT. Disabling
+                        JSON output.
+  -r, --replace         Replace file when trying to normalize it instead of
+                        creating a new one.
+  -f, --force           Replace file without asking if you are sure, use this
+                        flag with caution.
+  -t THRESHOLD, --threshold THRESHOLD
+                        Define a custom maximum amount of chaos allowed in
+                        decoded content. 0. <= chaos <= 1.
+  --version             Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+:tada: Since version 1.4.0 the CLI produces an easily usable stdout result in JSON format.
+ +```json +{ + "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", + "encoding": "cp1252", + "encoding_aliases": [ + "1252", + "windows_1252" + ], + "alternative_encodings": [ + "cp1254", + "cp1256", + "cp1258", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + "mbcs" + ], + "language": "French", + "alphabets": [ + "Basic Latin", + "Latin-1 Supplement" + ], + "has_sig_or_bom": false, + "chaos": 0.149, + "coherence": 97.152, + "unicode_path": null, + "is_preferred": true +} +``` + +### Python +*Just print out normalized text* +```python +from charset_normalizer import from_path + +results = from_path('./my_subtitle.srt') + +print(str(results.best())) +``` + +*Normalize any text file* +```python +from charset_normalizer import normalize +try: + normalize('./my_subtitle.srt') # should write to disk my_subtitle-***.srt +except IOError as e: + print('Sadly, we are unable to perform charset normalization.', str(e)) +``` + +*Upgrade your code without effort* +```python +from charset_normalizer import detect +``` + +The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. + +See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) + +## 😇 Why + +When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a +reliable alternative using a completely different method. Also! I never back down on a good challenge! + +I **don't care** about the **originating charset** encoding, because **two different tables** can +produce **two identical rendered string.** +What I want is to get readable text, the best I can. + +In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 + +Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. + +## 🍰 How + + - Discard all charset encoding table that could not fit the binary content. + - Measure chaos, or the mess once opened (by chunks) with a corresponding charset encoding. + - Extract matches with the lowest mess detected. + - Additionally, we measure coherence / probe for a language. + +**Wait a minute**, what is chaos/mess and coherence according to **YOU ?** + +*Chaos :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then +**I established** some ground rules about **what is obvious** when **it seems like** a mess. + I know that my interpretation of what is chaotic is very subjective, feel free to contribute in order to + improve or rewrite it. + +*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought +that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. + +## ⚡ Known limitations + + - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) + - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. + +## 👤 Contributing + +Contributions, issues and feature requests are very much welcome.
    +Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. + +## 📝 License + +Copyright © 2019 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
    +This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/RECORD new file mode 100644 index 0000000..72fce10 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/RECORD @@ -0,0 +1,35 @@ +../../../bin/normalizer,sha256=dbGhdDNurqKVceVJ8hBPqZi-qfTvsVKpUN6CX3Gebls,248 +charset_normalizer-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-2.1.1.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-2.1.1.dist-info/METADATA,sha256=C99l12g4d1E9_UiW-mqPCWx7v2M_lYGWxy1GTOjXSsA,11942 +charset_normalizer-2.1.1.dist-info/RECORD,, +charset_normalizer-2.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer-2.1.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +charset_normalizer-2.1.1.dist-info/direct_url.json,sha256=-w03Sh0HeQJNkuGIHFTgG2pmZ0zxKTiG3ilaAUKBu4U,260 +charset_normalizer-2.1.1.dist-info/entry_points.txt,sha256=uYo8aIGLWv8YgWfSna5HnfY_En4pkF1w4bgawNAXzP0,76 +charset_normalizer-2.1.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=jGhhf1IcOgCpZsr593E9fPvjWKnflVqHe_LwkOJjInU,1790 +charset_normalizer/__pycache__/__init__.cpython-38.pyc,, +charset_normalizer/__pycache__/api.cpython-38.pyc,, +charset_normalizer/__pycache__/cd.cpython-38.pyc,, +charset_normalizer/__pycache__/constant.cpython-38.pyc,, +charset_normalizer/__pycache__/legacy.cpython-38.pyc,, +charset_normalizer/__pycache__/md.cpython-38.pyc,, +charset_normalizer/__pycache__/models.cpython-38.pyc,, +charset_normalizer/__pycache__/utils.cpython-38.pyc,, +charset_normalizer/__pycache__/version.cpython-38.pyc,, +charset_normalizer/api.py,sha256=euVPmjAMbjpqhEHPjfKtyy1mK52U0TOUBUQgM_Qy6eE,19191 +charset_normalizer/assets/__init__.py,sha256=r7aakPaRIc2FFG2mw2V8NOTvkl25_euKZ3wPf5SAVa4,15222 +charset_normalizer/assets/__pycache__/__init__.cpython-38.pyc,, +charset_normalizer/cd.py,sha256=Pxdkbn4cy0iZF42KTb1FiWIqqKobuz_fDjGwc6JMNBc,10811 +charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/cli/__pycache__/__init__.cpython-38.pyc,, +charset_normalizer/cli/__pycache__/normalizer.cpython-38.pyc,, +charset_normalizer/cli/normalizer.py,sha256=FmD1RXeMpRBg_mjR0MaJhNUpM2qZ8wz2neAE7AayBeg,9521 +charset_normalizer/constant.py,sha256=NgU-pY8JH2a9lkVT8oKwAFmIUYNKOuSBwZgF9MrlNCM,19157 +charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384 +charset_normalizer/md.py,sha256=pZP8IVpSC82D8INA9Tf_y0ijJSRI-UIncZvLdfTWEd4,17642 +charset_normalizer/models.py,sha256=i68YdlSLTEI3EEBVXq8TLNAbyyjrLC2OWszc-OBAk9I,13167 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/utils.py,sha256=ykOznhcAeH-ODLBWJuI7t1nbwa1SAfN_bDYTCJGyh4U,11771 +charset_normalizer/version.py,sha256=_eh2MA3qS__IajlePQxKBmlw6zaBDvPYlLdEgxgIojw,79 diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git 
a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/direct_url.json new file mode 100644 index 0000000..c7199a2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, "url": "https://files.pythonhosted.org/packages/db/51/a507c856293ab05cdc1db77ff4bc1268ddd39f29e7dc4919aa497f0adbec/charset_normalizer-2.1.1-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/entry_points.txt new file mode 100644 index 0000000..a06d360 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +normalizer = charset_normalizer.cli.normalizer:cli_detect diff --git a/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/top_level.txt new file mode 120000 index 0000000..11d4208 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer-2.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ec/04/b2/cde3ebf3fc6e65626c9ea263201b7257cbe1128d30042bf530f4518b74 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__init__.py b/venv/lib/python3.8/site-packages/charset_normalizer/__init__.py new file mode 100644 index 0000000..2dcaf56 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. 
+""" +import logging + +from .api import from_bytes, from_fp, from_path, normalize +from .legacy import ( + CharsetDetector, + CharsetDoctor, + CharsetNormalizerMatch, + CharsetNormalizerMatches, + detect, +) +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "normalize", + "detect", + "CharsetMatch", + "CharsetMatches", + "CharsetNormalizerMatch", + "CharsetNormalizerMatches", + "CharsetDetector", + "CharsetDoctor", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..0d1c12d Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/api.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/api.cpython-38.pyc new file mode 100644 index 0000000..596685f Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/cd.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/cd.cpython-38.pyc new file mode 100644 index 0000000..5e45515 Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/cd.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/constant.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/constant.cpython-38.pyc new file mode 100644 index 0000000..4dd95b9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/constant.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/legacy.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/legacy.cpython-38.pyc new file mode 100644 index 0000000..6d80854 Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/legacy.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/md.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/md.cpython-38.pyc new file mode 100644 index 0000000..554eb56 Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/md.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/models.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/models.cpython-38.pyc new file mode 100644 index 0000000..11314d5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/models.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..65cd383 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..2243482 Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/__pycache__/version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/api.py b/venv/lib/python3.8/site-packages/charset_normalizer/api.py new file mode 100644 index 0000000..72907f9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/api.py @@ -0,0 +1,584 @@ +import logging +import warnings +from os import PathLike +from os.path import basename, splitext +from typing import Any, BinaryIO, List, Optional, Set + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: bytes, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. + If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocs of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. + By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. 
+ """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. " + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: List[str] = [] + + specified_encoding: Optional[str] = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested: Set[str] = set() + tested_but_hard_failure: List[str] = [] + tested_but_soft_failure: List[str] = [] + + fallback_ascii: Optional[CharsetMatch] = None + fallback_u8: Optional[CharsetMatch] = None + fallback_specified: Optional[CharsetMatch] = None + + results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: Optional[str] = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s wont be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)], + encoding=encoding_iana, + ) + else: + decoded_payload = str( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :], + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: List[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append(mess_ratio(chunk, threshold)) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except UnicodeDecodeError as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. 
%s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, encoding_iana, threshold, False, [], decoded_payload + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: List[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, 0.1, ",".join(target_languages) if target_languages else None + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + results.append( + CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + decoded_payload, + ) + ) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. + """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + ) + + +def from_path( + path: "PathLike[Any]", + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. + """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + ) + + +def normalize( + path: "PathLike[Any]", + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, +) -> CharsetMatch: + """ + Take a (text-based) file path and try to create another file next to it, this time using UTF-8. 
+ """ + warnings.warn( + "normalize is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + + results = from_path( + path, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + ) + + filename = basename(path) + target_extensions = list(splitext(filename)) + + if len(results) == 0: + raise IOError( + 'Unable to normalize "{}", no encoding charset seems to fit.'.format( + filename + ) + ) + + result = results.best() + + target_extensions[0] += "-" + result.encoding # type: ignore + + with open( + "{}".format(str(path).replace(filename, "".join(target_extensions))), "wb" + ) as fp: + fp.write(result.output()) # type: ignore + + return result # type: ignore diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/assets/__init__.py b/venv/lib/python3.8/site-packages/charset_normalizer/assets/__init__.py new file mode 100644 index 0000000..3c33ba3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/assets/__init__.py @@ -0,0 +1,1122 @@ +# -*- coding: utf-8 -*- +from typing import Dict, List + +FREQUENCIES: Dict[str, List[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + "Japanese": [ + "の", + "に", + "る", + "た", + "は", + "ー", + "と", + "し", + "を", + "で", + "て", + "が", + "い", + "ン", + "れ", + "な", + "年", + "ス", + "っ", + "ル", + "か", + "ら", + "あ", + "さ", + "も", + "り", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + 
"出", + "会", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + 
"ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Simple English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Classical Chinese": [ + "之", + "年", + "為", + "也", + "以", + "一", + "人", + "其", + "者", + "國", + "有", + "二", + "十", + "於", + "曰", + "三", + "不", + "大", + "而", + "子", + "中", + "五", + "四", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..fa375de Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/cd.py b/venv/lib/python3.8/site-packages/charset_normalizer/cd.py new file mode 100644 index 0000000..ee4b742 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/cd.py @@ -0,0 +1,339 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Counter as TypeCounter, Dict, List, Optional, Tuple + +from .assets import FREQUENCIES +from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. 
+ """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module( + "encodings.{}".format(iana_name) + ).IncrementalDecoder + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: Dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: Optional[str] = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. + """ + languages: List[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + unicode_ranges: List[str] = encoding_unicode_range(iana_name) + primary_range: Optional[str] = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese", "Classical Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. + """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. 
+ """ + languages: List[Tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) + """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + for character in ordered_characters: + if character not in FREQUENCIES_language_set: + continue + + characters_before_source: List[str] = FREQUENCIES[language][ + 0 : FREQUENCIES[language].index(character) + ] + characters_after_source: List[str] = FREQUENCIES[language][ + FREQUENCIES[language].index(character) : + ] + characters_before: List[str] = ordered_characters[ + 0 : ordered_characters.index(character) + ] + characters_after: List[str] = ordered_characters[ + ordered_characters.index(character) : + ] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. 
+ """ + layers: Dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: Optional[str] = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. + """ + per_language_ratios: Dict[str, List[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: List[Tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: List[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted(results, key=lambda x: x[1], reverse=True) diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/cli/__init__.py b/venv/lib/python3.8/site-packages/charset_normalizer/cli/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/cli/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..97f2107 Binary files 
/dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-38.pyc b/venv/lib/python3.8/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-38.pyc new file mode 100644 index 0000000..89c29aa Binary files /dev/null and b/venv/lib/python3.8/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/cli/normalizer.py b/venv/lib/python3.8/site-packages/charset_normalizer/cli/normalizer.py new file mode 100644 index 0000000..b8b652a --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/cli/normalizer.py @@ -0,0 +1,295 @@ +import argparse +import sys +from json import dumps +from os.path import abspath +from platform import python_version +from typing import List, Optional + +try: + from unicodedata2 import unidata_version +except ImportError: + from unicodedata import unidata_version + +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: Optional[List[str]] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. 
If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {}".format( + __version__, python_version(), unidata_version + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." 
+ if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + o_: List[str] = my_file.name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = abspath("./{}".format(".".join(o_))) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/constant.py b/venv/lib/python3.8/site-packages/charset_normalizer/constant.py new file mode 120000 index 0000000..9e8f7d6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/constant.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/36/05/3e/a58f091f66bd964553f282b000598851834a3ae481c19805f4cae53423 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/legacy.py b/venv/lib/python3.8/site-packages/charset_normalizer/legacy.py new file mode 120000 index 0000000..d641639 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/legacy.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5c/a7/99/3adb3f1dd61053f25bdc2f597ce8d8d828942f5df693dfe75deafc8228 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/md.py b/venv/lib/python3.8/site-packages/charset_normalizer/md.py new file mode 120000 index 0000000..fc60e7d --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/md.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a5/93/fc/215a520bcd83f08340f537ffcb48a3252448f94227719bcb75f4d611de \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/charset_normalizer/models.py b/venv/lib/python3.8/site-packages/charset_normalizer/models.py new file mode 100644 index 0000000..ccb0d47 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/models.py @@ -0,0 +1,401 @@ +import warnings +from collections import Counter +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from re import sub +from typing import ( + Any, + Counter as TypeCounter, + Dict, + Iterator, + List, + Optional, + Tuple, + Union, +) + +from .constant import NOT_PRINTABLE_PATTERN, TOO_BIG_SEQUENCE +from .md import mess_ratio +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: Optional[List[str]] = None + + self._leaves: List[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: Optional[bytes] = None + self._output_encoding: Optional[str] = None + + self._string: Optional[str] = decoded_payload + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Bellow 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + # When having a tough decision, use the result that decoded as many multi-byte as possible. + if chaos_difference == 0.0 and self.coherence == other.coherence: + return self.multi_byte_usage > other.multi_byte_usage + return self.coherence > other.coherence + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - len(str(self)) / len(self.raw) + + @property + def chaos_secondary_pass(self) -> float: + """ + Check once again chaos in decoded text, except this time, with full content. + Use with caution, this can be very slow. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "chaos_secondary_pass is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return mess_ratio(str(self), 1.0) + + @property + def coherence_non_latin(self) -> float: + """ + Coherence ratio on the first non-latin language detected if ANY. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "coherence_non_latin is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return 0.0 + + @property + def w_counter(self) -> TypeCounter[str]: + """ + Word counter instance on decoded text. 
+ Notice: Will be removed in 3.0 + """ + warnings.warn( + "w_counter is deprecated and will be removed in 3.0", DeprecationWarning + ) + + string_printable_only = sub(NOT_PRINTABLE_PATTERN, " ", str(self).lower()) + + return Counter(string_printable_only.split()) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: List[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. + if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. 
+ """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: List[Optional[str]] = [ + unicode_range(char) for char in str(self) + ] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def first(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def best(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: Optional[List[CharsetMatch]] = None): + self._results: List[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. 
+ """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: Optional[str] = unicode_path + self.encoding: Optional[str] = encoding + self.encoding_aliases: List[str] = encoding_aliases + self.alternative_encodings: List[str] = alternative_encodings + self.language: str = language + self.alphabets: List[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/py.typed b/venv/lib/python3.8/site-packages/charset_normalizer/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/utils.py b/venv/lib/python3.8/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000..859f212 --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/utils.py @@ -0,0 +1,424 @@ +try: + # WARNING: unicodedata2 support is going to be removed in 3.0 + # Python is quickly catching up. 
+ import unicodedata2 as unicodedata +except ImportError: + import unicodedata # type: ignore[no-redef] + +import importlib +import logging +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator, List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: List[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_ascii(character: str) -> bool: + try: + character.encode("ascii") + except UnicodeEncodeError: + return False + return True + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +def is_private_use_only(character: str) -> bool: + character_category: str = unicodedata.category(character) + + return 
character_category == "Co" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_unprintable(character: str) -> bool: + return ( + character.isspace() is False # includes \n \t \r \v + and character.isprintable() is False + and character != "\x1A" # Why? Its the ASCII substitute character. + and character != "\ufeff" # bug discovered in Python, + # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. + ) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len: int = len(sequence) + + results: List[str] = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: + """ + Identify and extract SIG/BOM in given sequence. 
+ """ + + for iana_encoding in ENCODING_MARKS: + marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges: Set[str] = set() + + for character in decoded_sequence: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module( + "encodings.{}".format(iana_name_a) + ).IncrementalDecoder + decoder_b = importlib.import_module( + "encodings.{}".format(iana_name_b) + ).IncrementalDecoder + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: Optional[str] = None, +) -> Generator[str, None, None]: + + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0 and sequences[i] >= 0x80: + + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/venv/lib/python3.8/site-packages/charset_normalizer/version.py b/venv/lib/python3.8/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000..64c0dbd --- /dev/null +++ b/venv/lib/python3.8/site-packages/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "2.1.1" +VERSION = __version__.split(".") diff --git a/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/LICENSE new file mode 120000 index 0000000..92971f1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/21/7e/27/cc3c5c144d93e8306f08aaaa16f4e6067057813aca3a9147caa77192cd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/METADATA new file mode 120000 index 0000000..8ba35bd --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/84/11/8d/a822ecdc106ae37473c2d2473404b09ec9d8b5e40d0940defd3e969b2b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/RECORD new file mode 100644 index 0000000..30937d2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/RECORD @@ -0,0 +1,45 @@ +cleo-0.8.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cleo-0.8.1.dist-info/LICENSE,sha256=IX4nzDxcFE2T6DBvCKqqFvTmBnBXgTrKOpFHyqdxks0,1080 +cleo-0.8.1.dist-info/METADATA,sha256=hBGNqCLs3BBq43RzwtJHNASwnsnYteQNCUDe_T6Wmys,15868 +cleo-0.8.1.dist-info/RECORD,, +cleo-0.8.1.dist-info/WHEEL,sha256=k_UtF7DeTKrUqc0FULNabrogAWgCy8zJZKQiwD2AkC4,89 +cleo/__init__.py,sha256=l84Awpa2hgbMdqSaD4_UX1l8CRX6PB1HVRsPX1_zWYo,217 +cleo/__pycache__/__init__.cpython-38.pyc,, +cleo/__pycache__/_compat.cpython-38.pyc,, +cleo/__pycache__/application.cpython-38.pyc,, +cleo/__pycache__/helpers.cpython-38.pyc,, +cleo/__pycache__/parser.cpython-38.pyc,, +cleo/_compat.py,sha256=KZPI2l7zHDeTS_0HSQEql2BAAKOtr9McAd1TStCkKQg,1191 +cleo/application.py,sha256=Oz1dk-NJSdaYporFNdWtNB4YSuD6Ms4o-mmkRXzSl1M,1574 +cleo/commands/__init__.py,sha256=WtpAApQZMr9jEIsVyQX26TxdAA0JcssQuOTI7_igRZc,136 +cleo/commands/__pycache__/__init__.cpython-38.pyc,, +cleo/commands/__pycache__/base_command.cpython-38.pyc,, +cleo/commands/__pycache__/command.cpython-38.pyc,, +cleo/commands/__pycache__/completions_command.cpython-38.pyc,, +cleo/commands/base_command.py,sha256=tQ-fGQSJd0vjMPsbOOock89cLyY9rkwkhVg04VY-gJk,2211 
+cleo/commands/command.py,sha256=UeF7cmrkwjMIHJ0pDDXE48mJaD2rP_nRFfye8cnPij0,9016 +cleo/commands/completions/__init__.py,sha256=lVL4VPz3c4kWCgjPtL4uuQLNmgwK92q1ffAsv5El02k,25 +cleo/commands/completions/__pycache__/__init__.cpython-38.pyc,, +cleo/commands/completions/__pycache__/templates.cpython-38.pyc,, +cleo/commands/completions/templates.py,sha256=n1W_i8qWGgr_sJAPzfPtolVMXgPub2rGtRce_1-bH9E,2306 +cleo/commands/completions_command.py,sha256=0EbcIu9FCcAaPRdehevatCj_qtHqDZSDL_6LGC99I24,14440 +cleo/config/__init__.py,sha256=tm7gcrb57dBixHeufARDA-khrPRZS2SM4djAYMH7vp8,51 +cleo/config/__pycache__/__init__.cpython-38.pyc,, +cleo/config/__pycache__/application_config.cpython-38.pyc,, +cleo/config/application_config.py,sha256=d-qk3W3tm_7GkYbU0UvlRK9hdaTPMm_A7QaVWmi0nQE,374 +cleo/helpers.py,sha256=LSrdQ9TPxLDutP2HOm8PiKS6_CdUVeghCt4RlVSDF9k,1084 +cleo/io/__init__.py,sha256=2kvw7SgHQZpTIilRT6yb__i81CeA7q5MyKb8gktQ_Rs,72 +cleo/io/__pycache__/__init__.cpython-38.pyc,, +cleo/io/__pycache__/buffered_io.cpython-38.pyc,, +cleo/io/__pycache__/console_io.cpython-38.pyc,, +cleo/io/__pycache__/io_mixin.cpython-38.pyc,, +cleo/io/buffered_io.py,sha256=3QBw4jilZEl5GulQVNra-qIbhtJYOaf1MO14CX_tAXM,306 +cleo/io/console_io.py,sha256=jJyEfPipQYoVW8OY01O_7qTBVNpkA5mBztUJIeouaHI,291 +cleo/io/io_mixin.py,sha256=riCrpNS_rt-FP2Z1Z_oMsayX5Ac4b6BJlNq7r-wqJ_Q,3259 +cleo/parser.py,sha256=e3-WY6V0lxub7wNmT3c0tf5NB3h4ym6w0LA7eI2oJvk,5355 +cleo/testers/__init__.py,sha256=4dtttYeCtN_QavajNxI8eGOv81uQE9r7RB0GgBjaw-s,173 +cleo/testers/__pycache__/__init__.cpython-38.pyc,, +cleo/testers/__pycache__/application_tester.cpython-38.pyc,, +cleo/testers/__pycache__/command_tester.cpython-38.pyc,, +cleo/testers/application_tester.py,sha256=ARy1YZLSisQljCNNzspGQm_yi8DByvIfKT_HaUdDAOY,1456 +cleo/testers/command_tester.py,sha256=MnyciPDRVzLMruQo3Fm1AmOPK_tb2syBQqI5TA1rQ5E,1918 diff --git a/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/WHEEL new file mode 120000 index 0000000..3817540 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo-0.8.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/93/f5/2d/17b0de4caad4a9cd0550b35a6eba20016802cbccc964a422c03d80902e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/__init__.py b/venv/lib/python3.8/site-packages/cleo/__init__.py new file mode 120000 index 0000000..bf76e82 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/97/ce/00/c296b68606cc76a49a0f8fd45f597c0915fa3c1d47551b0f5f5ff3598a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e72034b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/__pycache__/_compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000..77d6910 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/__pycache__/_compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/__pycache__/application.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/__pycache__/application.cpython-38.pyc new file mode 100644 index 0000000..163f6df Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/cleo/__pycache__/application.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/__pycache__/helpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/__pycache__/helpers.cpython-38.pyc new file mode 100644 index 0000000..e3307dc Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/__pycache__/helpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/__pycache__/parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/__pycache__/parser.cpython-38.pyc new file mode 100644 index 0000000..85480a3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/__pycache__/parser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/_compat.py b/venv/lib/python3.8/site-packages/cleo/_compat.py new file mode 120000 index 0000000..efd70f6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/_compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/29/93/c8/da5ef31c37934bfd0749012a97604000a3adafd31c01dd534ad0a42908 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/application.py b/venv/lib/python3.8/site-packages/cleo/application.py new file mode 120000 index 0000000..8561297 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/application.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3b/3d/5d/93e34949d698a68ac535d5ad341e184ae0fa32ce28fa69a4457cd29753 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/commands/__init__.py b/venv/lib/python3.8/site-packages/cleo/commands/__init__.py new file mode 120000 index 0000000..721f547 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/commands/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5a/da/40/02941932bf63108b15c905f6e93c5d000d0972cb10b8e4c8eff8a04597 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..bb09b87 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/base_command.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/base_command.cpython-38.pyc new file mode 100644 index 0000000..d140c5d Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/base_command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/command.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/command.cpython-38.pyc new file mode 100644 index 0000000..708f19f Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/completions_command.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/completions_command.cpython-38.pyc new file mode 100644 index 0000000..6f0eef7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/commands/__pycache__/completions_command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/commands/base_command.py b/venv/lib/python3.8/site-packages/cleo/commands/base_command.py new file mode 120000 index 0000000..6f86723 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/commands/base_command.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/b5/0f/9f/190489774be330fb1b38ea1c93cf5c2f263dae4c24855834e1563e8099 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/commands/command.py b/venv/lib/python3.8/site-packages/cleo/commands/command.py new file mode 120000 index 0000000..9917d6e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/commands/command.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/51/e1/7b/726ae4c233081c9d290c35c4e3c989683dab3ff9d115fc9ef1c9cf8a3d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/commands/completions/__init__.py b/venv/lib/python3.8/site-packages/cleo/commands/completions/__init__.py new file mode 120000 index 0000000..a250203 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/commands/completions/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/95/52/f8/54fcf77389160a08cfb4be2eb902cd9a0c0af76ab57df02cbf9125d369 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/commands/completions/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/commands/completions/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..dafd045 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/commands/completions/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/commands/completions/__pycache__/templates.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/commands/completions/__pycache__/templates.cpython-38.pyc new file mode 100644 index 0000000..ffbec8b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/commands/completions/__pycache__/templates.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/commands/completions/templates.py b/venv/lib/python3.8/site-packages/cleo/commands/completions/templates.py new file mode 120000 index 0000000..4f2694b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/commands/completions/templates.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9f/55/bf/8bca961a0affb0900fcdf3eda2554c5e03ee6f6ac6b5171eff5f9b1fd1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/commands/completions_command.py b/venv/lib/python3.8/site-packages/cleo/commands/completions_command.py new file mode 120000 index 0000000..3ba8d75 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/commands/completions_command.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d0/46/dc/22ef4509c01a3d175e85ebdab428ffaad1ea0d94832ffe8b182f7d236e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/config/__init__.py b/venv/lib/python3.8/site-packages/cleo/config/__init__.py new file mode 120000 index 0000000..9d5a5d2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/config/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b6/6e/e0/72b6f9edd062c477ae7c044303e921acf4594b648ce1d8c060c1fbbe9f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/config/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/config/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..822cde9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/config/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/config/__pycache__/application_config.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/config/__pycache__/application_config.cpython-38.pyc new file mode 100644 index 0000000..1aeb772 Binary 
files /dev/null and b/venv/lib/python3.8/site-packages/cleo/config/__pycache__/application_config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/config/application_config.py b/venv/lib/python3.8/site-packages/cleo/config/application_config.py new file mode 120000 index 0000000..435cd9f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/config/application_config.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/77/ea/a4/dd6ded9bfec69186d4d14be544af6175a4cf326fc0ed06955a68b49d01 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/helpers.py b/venv/lib/python3.8/site-packages/cleo/helpers.py new file mode 120000 index 0000000..9ca76d4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/helpers.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2d/2a/dd/43d4cfc4b0eeb4fd873a6f0f88a4bafc275455e8210ade1195548317d9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/io/__init__.py b/venv/lib/python3.8/site-packages/cleo/io/__init__.py new file mode 120000 index 0000000..1a4ec7e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/io/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/da/4b/f0/ed2807419a532229514fac9bfff8bcd42780eeae4cc8a6fc824b50fd1b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/io/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..cd1a397 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/io/__pycache__/buffered_io.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/buffered_io.cpython-38.pyc new file mode 100644 index 0000000..4885724 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/buffered_io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/io/__pycache__/console_io.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/console_io.cpython-38.pyc new file mode 100644 index 0000000..4a34abc Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/console_io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/io/__pycache__/io_mixin.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/io_mixin.cpython-38.pyc new file mode 100644 index 0000000..6e0b62a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/io/__pycache__/io_mixin.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/io/buffered_io.py b/venv/lib/python3.8/site-packages/cleo/io/buffered_io.py new file mode 120000 index 0000000..1602fa9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/io/buffered_io.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/dd/00/70/e238a56449791ae95054dadafaa21b86d25839a7f530ed78097fed0173 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/io/console_io.py b/venv/lib/python3.8/site-packages/cleo/io/console_io.py new file mode 120000 index 0000000..725354b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/io/console_io.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8c/9c/84/7cf8a9418a155bc398d353bfeea4c154da64039981ced50921ea2e6872 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/io/io_mixin.py b/venv/lib/python3.8/site-packages/cleo/io/io_mixin.py new file mode 120000 index 0000000..4ac63a0 --- /dev/null 
+++ b/venv/lib/python3.8/site-packages/cleo/io/io_mixin.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ae/20/ab/a4d4bfaedf853f667567fa0cb1ac97e407386fa04994dabbafec2a27f4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/parser.py b/venv/lib/python3.8/site-packages/cleo/parser.py new file mode 120000 index 0000000..8fea4be --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/parser.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7b/7f/96/63a574971b9bef03664f7734b5fe4d077878ca6eb0d0b03b788da826f9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/testers/__init__.py b/venv/lib/python3.8/site-packages/cleo/testers/__init__.py new file mode 120000 index 0000000..cfb803f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/testers/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e1/db/6d/b58782b4dfd06af6a337123c7863aff35b9013dafb441d068018dac3eb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3b6c137 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/application_tester.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/application_tester.cpython-38.pyc new file mode 100644 index 0000000..0389122 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/application_tester.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/command_tester.cpython-38.pyc b/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/command_tester.cpython-38.pyc new file mode 100644 index 0000000..dad0031 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cleo/testers/__pycache__/command_tester.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cleo/testers/application_tester.py b/venv/lib/python3.8/site-packages/cleo/testers/application_tester.py new file mode 120000 index 0000000..b549fd3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/testers/application_tester.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/01/1c/b5/6192d28ac4258c234dceca46426ff28bc0c1caf21f293fc769474300e6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cleo/testers/command_tester.py b/venv/lib/python3.8/site-packages/cleo/testers/command_tester.py new file mode 120000 index 0000000..6e42d25 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cleo/testers/command_tester.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/32/7c/9c/88f0d15732ccaee428dc59b502638f2bfb5bdacc8142a2394c0d6b4391 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/LICENSE.rst new file mode 120000 index 0000000..52412e9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/LICENSE.rst @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9a/8a/d1/06a394e853bfe21f42f4e72d592819a22805d991b5f3275029292b658d \ No 
newline at end of file diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/METADATA b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/METADATA new file mode 120000 index 0000000..8db0d8c --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b4/52/48/5f994e8f1edce4b8d96dd4cf1550c94a0caff45ef85d8d1708fa7f8277 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/RECORD new file mode 100644 index 0000000..b17e45f --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/RECORD @@ -0,0 +1,41 @@ +click-8.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.1.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.3.dist-info/METADATA,sha256=tFJIX5lOjx7c5LjZbdTPFVDJSgyv9F74XY0XCPp_gnc,3247 +click-8.1.3.dist-info/RECORD,, +click-8.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click-8.1.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +click-8.1.3.dist-info/direct_url.json,sha256=z89abGMNM2FbtPWoC_nSsRwnExzkTjRZTvLd28vXEi8,247 +click-8.1.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=rQBLutqg-z6m8nOzivIfigDn_emijB_dKv9BZ2FNi5s,3138 +click/__pycache__/__init__.cpython-38.pyc,, +click/__pycache__/_compat.cpython-38.pyc,, +click/__pycache__/_termui_impl.cpython-38.pyc,, +click/__pycache__/_textwrap.cpython-38.pyc,, +click/__pycache__/_winconsole.cpython-38.pyc,, +click/__pycache__/core.cpython-38.pyc,, +click/__pycache__/decorators.cpython-38.pyc,, +click/__pycache__/exceptions.cpython-38.pyc,, +click/__pycache__/formatting.cpython-38.pyc,, +click/__pycache__/globals.cpython-38.pyc,, +click/__pycache__/parser.cpython-38.pyc,, +click/__pycache__/shell_completion.cpython-38.pyc,, +click/__pycache__/termui.cpython-38.pyc,, +click/__pycache__/testing.cpython-38.pyc,, +click/__pycache__/types.cpython-38.pyc,, +click/__pycache__/utils.cpython-38.pyc,, +click/_compat.py,sha256=JIHLYs7Jzz4KT9t-ds4o4jBzLjnwCiJQKqur-5iwCKI,18810 +click/_termui_impl.py,sha256=qK6Cfy4mRFxvxE8dya8RBhLpSC8HjF-lvBc6aNrPdwg,23451 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=mz87bYEKzIoNYEa56BFAiOJnvt1Y0L-i7wD4_ZecieE,112782 +click/decorators.py,sha256=yo3zvzgUm5q7h5CXjyV6q3h_PJAiUaem178zXwdWUFI,16350 +click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 +click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=qOp_BeC9esEOSZKyu5G7RIxEUaLsXUX-mTb7hB1r4QY,18018 +click/termui.py,sha256=ACBQVOvFCTSqtD5VREeCAdRtlHd-Imla-Lte4wSfMjA,28355 +click/testing.py,sha256=ptpMYgRY7dVfE3UDgkgwayu9ePw98sQI3D7zZXiCpj4,16063 +click/types.py,sha256=rEb1aZSQKq3ciCMmjpG2Uva9vk498XRL7ThrcK2GRss,35805 +click/utils.py,sha256=33D6E7poH_nrKB-xr-UyDEXnxOcCiQqxuRLtrqeVv6o,18682 diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/REQUESTED new file mode 100644 index 
0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/direct_url.json new file mode 100644 index 0000000..b31e802 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, "url": "https://files.pythonhosted.org/packages/c2/f1/df59e28c642d583f7dacffb1e0965d0e00b218e0186d7858ac5233dce840/click-8.1.3-py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/top_level.txt new file mode 120000 index 0000000..0a3c828 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/27/56/50/a206a5612e29a6163f94f102a0d31fc341f34d2ad98250b861fc28e310 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/__init__.py b/venv/lib/python3.8/site-packages/click/__init__.py new file mode 120000 index 0000000..370438a --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ad/00/4b/badaa0fb3ea6f273b38af21f8a00e7fde9a28c1fdd2aff4167614d8b9b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..bcef436 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/_compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000..0a33ddc Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/_compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/_termui_impl.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/_termui_impl.cpython-38.pyc new file mode 100644 index 0000000..0fb63f5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/_termui_impl.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/_textwrap.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/_textwrap.cpython-38.pyc new file mode 100644 index 0000000..974d82b Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/_textwrap.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/_winconsole.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/_winconsole.cpython-38.pyc new file mode 100644 index 0000000..e5b94c8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/_winconsole.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/core.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/click/__pycache__/core.cpython-38.pyc new file mode 100644 index 0000000..71c091e Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/core.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/decorators.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/decorators.cpython-38.pyc new file mode 100644 index 0000000..e4027f6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/decorators.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..de6561d Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/formatting.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/formatting.cpython-38.pyc new file mode 100644 index 0000000..bb86fdd Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/formatting.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/globals.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/globals.cpython-38.pyc new file mode 100644 index 0000000..cb75c27 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/globals.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/parser.cpython-38.pyc new file mode 100644 index 0000000..51c4802 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/parser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/shell_completion.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/shell_completion.cpython-38.pyc new file mode 100644 index 0000000..09cf101 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/shell_completion.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/termui.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/termui.cpython-38.pyc new file mode 100644 index 0000000..2846649 Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/termui.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/testing.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/testing.cpython-38.pyc new file mode 100644 index 0000000..f21d59e Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/testing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/types.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/types.cpython-38.pyc new file mode 100644 index 0000000..c016a8d Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/types.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/click/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..02c5bea Binary files /dev/null and b/venv/lib/python3.8/site-packages/click/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/click/_compat.py b/venv/lib/python3.8/site-packages/click/_compat.py new file 
mode 120000 index 0000000..0a06ef7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/24/81/cb/62cec9cf3e0a4fdb7e76ce28e230732e39f00a22502aababfb98b008a2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/_termui_impl.py b/venv/lib/python3.8/site-packages/click/_termui_impl.py new file mode 120000 index 0000000..c8168cd --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_termui_impl.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a8/ae/82/7f2e26445c6fc44f1dc9af110612e9482f078c5fa5bc173a68dacf7708 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/_textwrap.py b/venv/lib/python3.8/site-packages/click/_textwrap.py new file mode 120000 index 0000000..eeb0063 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_textwrap.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d7/47/d0/eb839c05432e2bb985be1f37eb7feea0ec4f95122d64198acd12438286 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/_winconsole.py b/venv/lib/python3.8/site-packages/click/_winconsole.py new file mode 120000 index 0000000..5d8a9db --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_winconsole.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/3b/b7/8d091c643d16dbb584306aa610fe32fdcad45733c2aac5ff1586fb04ed \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/core.py b/venv/lib/python3.8/site-packages/click/core.py new file mode 120000 index 0000000..0b9f5a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/core.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9b/3f/3b/6d810acc8a0d6046b9e8114088e267bedd58d0bfa2ef00f8fd979c89e1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/decorators.py b/venv/lib/python3.8/site-packages/click/decorators.py new file mode 120000 index 0000000..012e3f3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/decorators.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ca/8d/f3/bf38149b9abb8790978f257aab787f3c902251a7a6d7bf335f07565052 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/exceptions.py b/venv/lib/python3.8/site-packages/click/exceptions.py new file mode 120000 index 0000000..8bad9bf --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ee/00/da/2c6b8564178237063d11132c176f99dd1f45bead3d65ce886522a3b1ea \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/formatting.py b/venv/lib/python3.8/site-packages/click/formatting.py new file mode 120000 index 0000000..aca19d6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/formatting.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/16/b7/f4/fb95b7dfe968c98fe2f6aaf05d1f3e4939d6de6e60bf2c4b554772c729 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/globals.py b/venv/lib/python3.8/site-packages/click/globals.py new file mode 120000 index 0000000..836c679 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/globals.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4c/ff/aa/33cf124f373b7f5dbb877e530ffbfddb41607ce0f6130cea034a04f175 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/parser.py b/venv/lib/python3.8/site-packages/click/parser.py new file mode 120000 index 0000000..c635a77 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/parser.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/70/01/2d/d6e411f20ab7f92f72b2a6d553e7dd01936f8a5c70e11789fd3d4e40c9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/py.typed b/venv/lib/python3.8/site-packages/click/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/shell_completion.py b/venv/lib/python3.8/site-packages/click/shell_completion.py new file mode 120000 index 0000000..dbf66fd --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/shell_completion.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a8/ea/7f/05e0bd7ac10e4992b2bb91bb448c4451a2ec5d45fe9936fb841d6be106 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/termui.py b/venv/lib/python3.8/site-packages/click/termui.py new file mode 120000 index 0000000..97800e3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/termui.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/00/20/50/54ebc50934aab43e5544478201d46d94777e22695af8bb5ee3049f3230 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/testing.py b/venv/lib/python3.8/site-packages/click/testing.py new file mode 120000 index 0000000..57d6339 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/testing.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a6/da/4c/620458edd55f1375038248306b2bbd78fc3df2c408dc3ef3657882a63e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/types.py b/venv/lib/python3.8/site-packages/click/types.py new file mode 120000 index 0000000..a44da12 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/types.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ac/46/f5/6994902aaddc8823268e91b652f6bdbe4e3df1744bed386b70ad8646cb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/click/utils.py b/venv/lib/python3.8/site-packages/click/utils.py new file mode 120000 index 0000000..44ef103 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/df/70/fa/13ba681ff9eb281fb1afe5320c45e7c4e702890ab1b912edaea795bfaa \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/LICENSE b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/LICENSE new file mode 120000 index 0000000..1cb34fe --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f2/f9/b4/60ba719da6626add264d3782f275a4ff7aab677beda08b330911e23adb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/METADATA new file mode 120000 index 0000000..bc1c8a7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/55/02/01/80bdc8a90831817c7ceda0b0243f1892d000d6adda4727f0b89af67ebe \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/RECORD 
b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/RECORD new file mode 100644 index 0000000..b2253f9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/RECORD @@ -0,0 +1,255 @@ +clikit-0.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +clikit-0.6.2.dist-info/LICENSE,sha256=8vm0YLpxnaZiat0mTTeC8nWk_3qrZ3vtoIszCRHiOts,1062 +clikit-0.6.2.dist-info/METADATA,sha256=VQIBgL3IqQgxgXx87aCwJD8YktAA1q3aRyfwuJr2fr4,1581 +clikit-0.6.2.dist-info/RECORD,, +clikit-0.6.2.dist-info/WHEEL,sha256=BcHwX75L5o9y8NN94BU-O_IP_7AP-vrh-M6RWGoded0,87 +clikit/__init__.py,sha256=672ya2xDu_gkHfPFkW8oh4YATmywus0kDO_H5-IvsCw,208 +clikit/__pycache__/__init__.cpython-38.pyc,, +clikit/__pycache__/console_application.cpython-38.pyc,, +clikit/adapter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +clikit/adapter/__pycache__/__init__.cpython-38.pyc,, +clikit/adapter/__pycache__/style_converter.cpython-38.pyc,, +clikit/adapter/style_converter.py,sha256=jdauKvcNCaSF_LatoaO942wU00Sh24aY-74hq4IEcR8,860 +clikit/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +clikit/api/__pycache__/__init__.cpython-38.pyc,, +clikit/api/__pycache__/exceptions.cpython-38.pyc,, +clikit/api/application/__init__.py,sha256=FoOc_AS7QiydMuCon44T4ZiOCyIEjr27cH5uT618-OE,37 +clikit/api/application/__pycache__/__init__.cpython-38.pyc,, +clikit/api/application/__pycache__/application.cpython-38.pyc,, +clikit/api/application/application.py,sha256=Tv2Me8HWf44ULl7kh59qctwvnr-rr1nP4fo-S5XbR68,1775 +clikit/api/args/__init__.py,sha256=sZQSI_WC4tcrpqsuxnEcAYeS1ZbcOTxWJYgI9sheTcs,89 +clikit/api/args/__pycache__/__init__.cpython-38.pyc,, +clikit/api/args/__pycache__/args.cpython-38.pyc,, +clikit/api/args/__pycache__/args_parser.cpython-38.pyc,, +clikit/api/args/__pycache__/exceptions.cpython-38.pyc,, +clikit/api/args/__pycache__/raw_args.cpython-38.pyc,, +clikit/api/args/args.py,sha256=3OQ5m6dWfShmQxbNqwgAJg9gsx20dASMd4S7C_IFP5Y,3837 +clikit/api/args/args_parser.py,sha256=chddl-5uf83oreakhusFWAFWZstuTgdTIbslWHY_Q1M,350 +clikit/api/args/exceptions.py,sha256=FSRjsZQJHf2EhDDLQZG3ehy7KnDVYnFjGgDNTvilZZA,1999 +clikit/api/args/format/__init__.py,sha256=GgBjypUc6-OOeOXxZcavu5l7Qu_OdlqKcusTGHZHIZE,225 +clikit/api/args/format/__pycache__/__init__.cpython-38.pyc,, +clikit/api/args/format/__pycache__/abstract_option.cpython-38.pyc,, +clikit/api/args/format/__pycache__/args_format.cpython-38.pyc,, +clikit/api/args/format/__pycache__/args_format_builder.cpython-38.pyc,, +clikit/api/args/format/__pycache__/argument.cpython-38.pyc,, +clikit/api/args/format/__pycache__/command_name.cpython-38.pyc,, +clikit/api/args/format/__pycache__/command_option.cpython-38.pyc,, +clikit/api/args/format/__pycache__/option.cpython-38.pyc,, +clikit/api/args/format/abstract_option.py,sha256=zzqZ7dhBy-ZQKPyM-ixf-1dkvhD2h34xXhuS2gRLsfc,4360 +clikit/api/args/format/args_format.py,sha256=da0dG9Q79t-_j-m_LoThPA7w5HIvjYs55Gvrn2lhQ2c,8371 +clikit/api/args/format/args_format_builder.py,sha256=VaJl79nHpdnqz6fm6eJmuCAJNtBkjDKN8bWBq89lKQw,11308 +clikit/api/args/format/argument.py,sha256=mMYpY--eJqd5CNQpHrLGPFzVohMVXcg7-CnQtu2RR_E,5222 +clikit/api/args/format/command_name.py,sha256=R5mHuudJ0sKy47qVBMpQhI2cBaDdqTvvATsWRgJQo14,828 +clikit/api/args/format/command_option.py,sha256=tBMRIm2eAVEpYDh_GPmyDbhKq9xpjvn7g8bpWWyrXqg,1792 +clikit/api/args/format/option.py,sha256=NbSSFjH6ujR0a16_AZf-mT3dcYnozGW5dANidoEiNDA,5312 +clikit/api/args/raw_args.py,sha256=lmtJHySybizLekRblhyfjApOGg50da2GlQc0Eh02yHc,812 
+clikit/api/command/__init__.py,sha256=0791MZSWZM0hNoIp54JgGEolyRxZVvbrshEmrGneAbo,79 +clikit/api/command/__pycache__/__init__.cpython-38.pyc,, +clikit/api/command/__pycache__/command.cpython-38.pyc,, +clikit/api/command/__pycache__/command_collection.cpython-38.pyc,, +clikit/api/command/__pycache__/exceptions.cpython-38.pyc,, +clikit/api/command/command.py,sha256=n4CyIcKpA02tL-u_hagjN5NQy3tbGHGiJLGu_VLxNag,5500 +clikit/api/command/command_collection.py,sha256=DgOihRbOFw9YxAo0DJlKXLdic_J2160eqCRCK1HJ5Fw,1930 +clikit/api/command/exceptions.py,sha256=cur-caKkWd7PkKgOjL725CwrO4lUGXOtE1zo1vx2ugY,683 +clikit/api/config/__init__.py,sha256=eyPIKClyLxlsbmAGfRAmUT1ZnnqNmiXNYd8mMMtZn30,92 +clikit/api/config/__pycache__/__init__.cpython-38.pyc,, +clikit/api/config/__pycache__/application_config.cpython-38.pyc,, +clikit/api/config/__pycache__/command_config.cpython-38.pyc,, +clikit/api/config/__pycache__/config.cpython-38.pyc,, +clikit/api/config/application_config.py,sha256=N_LDUeYWbBv_GNNuv1W4GovBGUGCQvT-jWNbr0TUr2E,7640 +clikit/api/config/command_config.py,sha256=9hO6llUVX0x2z13EkAJB06XJruZBzbiEA9CJo8K7kaA,7280 +clikit/api/config/config.py,sha256=2ifrvAYJ7XnUj2-hmP8azD8tjGvooPnRHadZymBzHJk,4101 +clikit/api/event/__init__.py,sha256=iK44q99DJj6jI3-FSTbg87ZKSDQKpxuZfjhDazzElHk,315 +clikit/api/event/__pycache__/__init__.cpython-38.pyc,, +clikit/api/event/__pycache__/config_event.cpython-38.pyc,, +clikit/api/event/__pycache__/console_events.cpython-38.pyc,, +clikit/api/event/__pycache__/event.cpython-38.pyc,, +clikit/api/event/__pycache__/event_dispatcher.cpython-38.pyc,, +clikit/api/event/__pycache__/pre_handle_event.cpython-38.pyc,, +clikit/api/event/__pycache__/pre_resolve_event.cpython-38.pyc,, +clikit/api/event/config_event.py,sha256=gQVVvrRueYvV0z3OQCNR7Fhi84MgjjBVW0zn6Ha7UVI,384 +clikit/api/event/console_events.py,sha256=RHK8n_BGZZ3toZBrqgaEGk1Wg-dLz760h17dg1uO7bc,74 +clikit/api/event/event.py,sha256=pJPgJesyg5JX8PPA2-zi5kHUk3AbDXXgCyIYBuGtJmo,328 +clikit/api/event/event_dispatcher.py,sha256=RZvQ1BiYj4l3cb5H0crn6YRT5_T_oOO8cXq0qEIbusg,3055 +clikit/api/event/pre_handle_event.py,sha256=EoFIuh1I_R0T5vF7je0dDVG9pU5PYNhNZjHsAOPdcEU,1166 +clikit/api/event/pre_resolve_event.py,sha256=vs6LUs0Yq1pJKCcqjw7X2xcf6gip75BQXpKtMYwu_u8,1049 +clikit/api/exceptions.py,sha256=vWBJqhYXNFBgg4rIIKN07hPiA42HmvqUSz1j9s1N6oA,97 +clikit/api/formatter/__init__.py,sha256=qyU1_SK11VRjYiwty4WTK9g5XtftpTHMPG7YUOsmW_8,90 +clikit/api/formatter/__pycache__/__init__.cpython-38.pyc,, +clikit/api/formatter/__pycache__/formatter.cpython-38.pyc,, +clikit/api/formatter/__pycache__/style.cpython-38.pyc,, +clikit/api/formatter/__pycache__/style_set.cpython-38.pyc,, +clikit/api/formatter/formatter.py,sha256=mWprIFsHO6DYF0vrA8H7FACr-GnEruTLsY-Y1W8Czgs,665 +clikit/api/formatter/style.py,sha256=lmvl2iEc26IWQO-bhS5JJT-Gemt1mkMG_xfQD2mcvC0,2810 +clikit/api/formatter/style_set.py,sha256=9MJO-uadkgbUkMtxyynlUpvh9-9DezEuEU0CN2ig-no,981 +clikit/api/io/__init__.py,sha256=wFO8kFY6aNF0NycVG4AYe5J_A5LtFv8O6ivFQJJk-SU,187 +clikit/api/io/__pycache__/__init__.cpython-38.pyc,, +clikit/api/io/__pycache__/flags.cpython-38.pyc,, +clikit/api/io/__pycache__/input.cpython-38.pyc,, +clikit/api/io/__pycache__/input_stream.cpython-38.pyc,, +clikit/api/io/__pycache__/io.cpython-38.pyc,, +clikit/api/io/__pycache__/io_exception.cpython-38.pyc,, +clikit/api/io/__pycache__/output.cpython-38.pyc,, +clikit/api/io/__pycache__/output_stream.cpython-38.pyc,, +clikit/api/io/__pycache__/section_output.cpython-38.pyc,, 
+clikit/api/io/flags.py,sha256=BxVtAqib2JKrxa8enKb7jgPHrbowS3Jh1wpmM1Uxwe8,254 +clikit/api/io/input.py,sha256=TmKlYFDII96sH3oM_7JAYn6x7i4Mq8imRrOItVcmzLM,1867 +clikit/api/io/input_stream.py,sha256=vPrnCvz7DYubNqUCt6ggzUgSXa-PoGi8ifKKcaDest0,731 +clikit/api/io/io.py,sha256=_t1qM3sWJB6nky1s77fl7R8Zqq2vSEgI6HowdWJ9jH0,6991 +clikit/api/io/io_exception.py,sha256=23tAhDkTgMpZw3AVHAQtwDsLxrMcSEmFDqhAj0zaU1M,92 +clikit/api/io/output.py,sha256=wuqKMJ0nsga5Brx0i1rSsSwqttnkYNrbSRqX7fs7Gjw,6291 +clikit/api/io/output_stream.py,sha256=37S8vcrlh1GJgwWKN05w3N9Us1mvDj-uJuNfDeSD1U0,857 +clikit/api/io/section_output.py,sha256=sRj6HRSCHm6RrI_m4UEDImshBDO45EniRXbhQeekVBY,3088 +clikit/api/resolver/__init__.py,sha256=8Jatu87gG0XzW9Gc6u-W4TWbtsnDJvsTc_fitbIYJwA,92 +clikit/api/resolver/__pycache__/__init__.cpython-38.pyc,, +clikit/api/resolver/__pycache__/command_resolver.cpython-38.pyc,, +clikit/api/resolver/__pycache__/exceptions.cpython-38.pyc,, +clikit/api/resolver/__pycache__/resolved_command.cpython-38.pyc,, +clikit/api/resolver/command_resolver.py,sha256=tJzXF9CmCkYuHmeq3rCJ848gDcOZwE-eqCUGAKUV4lg,344 +clikit/api/resolver/exceptions.py,sha256=s5syhgaE4KsacyBfPj3wLCBIqSp4QdYfqT02btZ8o34,779 +clikit/api/resolver/resolved_command.py,sha256=mS0sHUQ-P9BMRv6eP-iCvfsdOeEVbqmCuB-FitsIpps,447 +clikit/args/__init__.py,sha256=YuDLMbpDBTRRX8RsDGFwqp9Qi83P0x_U2s9pAQQguRs,119 +clikit/args/__pycache__/__init__.cpython-38.pyc,, +clikit/args/__pycache__/argv_args.cpython-38.pyc,, +clikit/args/__pycache__/default_args_parser.cpython-38.pyc,, +clikit/args/__pycache__/string_args.cpython-38.pyc,, +clikit/args/__pycache__/token_parser.cpython-38.pyc,, +clikit/args/argv_args.py,sha256=7FiSchlyhz40VzsYK7uJsaSO4XD9x05chjDVLKAppwI,1262 +clikit/args/default_args_parser.py,sha256=B_F9Mn3G8RC6dPRPxKT2qlfsq7Vk0o-KVslXObl0oy8,11723 +clikit/args/inputs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +clikit/args/inputs/__pycache__/__init__.cpython-38.pyc,, +clikit/args/string_args.py,sha256=YSQnT9_KSKqnNwibwJ6VhDBFJ9VIbwBA88iMrIH2ScI,1067 +clikit/args/token_parser.py,sha256=8LW5qEck6mEwcwgfPBwwGTo1tfu88q5P7zNDFIrvRRQ,3031 +clikit/config/__init__.py,sha256=dxmVY0lZ0-Htf-Kkm7MnrFW541lkeeIBnQG10Q5y0LI,65 +clikit/config/__pycache__/__init__.cpython-38.pyc,, +clikit/config/__pycache__/default_application_config.cpython-38.pyc,, +clikit/config/default_application_config.py,sha256=lhKuZosg23WQ9V4H7nsNoVckCVXc18E13sxMAjqZK5w,5874 +clikit/console_application.py,sha256=quUwTfCWa402UgJLp3Lyn-6f4v8KUFyAhsdGIMecQbg,6000 +clikit/formatter/__init__.py,sha256=5goy4-OUK8L7uUmsrpkFLdQjlRAqktlYyzu1D5qYMmI,175 +clikit/formatter/__pycache__/__init__.cpython-38.pyc,, +clikit/formatter/__pycache__/ansi_formatter.cpython-38.pyc,, +clikit/formatter/__pycache__/default_style_set.cpython-38.pyc,, +clikit/formatter/__pycache__/null_formatter.cpython-38.pyc,, +clikit/formatter/__pycache__/plain_formatter.cpython-38.pyc,, +clikit/formatter/ansi_formatter.py,sha256=1PI5JSHWVXuTmFIM0LUaGtpYNAFbJ-_fuCAD4STLF-8,1868 +clikit/formatter/default_style_set.py,sha256=FByMJFRdM4sBTusTQUM3tm8AWD0c9jKSSS4EdFLGyZM,600 +clikit/formatter/null_formatter.py,sha256=S13YSVuwaoqpcWr-wnGg0DcvegSC7HJ-c-uVMSnUjbw,496 +clikit/formatter/plain_formatter.py,sha256=AFGtMvG_2bUpfKia7I8zxEKZzLd2nJlU0C5hcGldXCg,1533 +clikit/handler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +clikit/handler/__pycache__/__init__.cpython-38.pyc,, +clikit/handler/__pycache__/callback_handler.cpython-38.pyc,, 
+clikit/handler/callback_handler.py,sha256=mkmYrJKbU9sEsQBXe5xV-V4o6EZUz-76vy2Z54M09lY,385 +clikit/handler/help/__init__.py,sha256=g6dt8GiW2Nw6S2KDpGB86L4H9fnvmQY4ha9IphC9AJU,47 +clikit/handler/help/__pycache__/__init__.cpython-38.pyc,, +clikit/handler/help/__pycache__/help_handler.cpython-38.pyc,, +clikit/handler/help/__pycache__/help_text_handler.cpython-38.pyc,, +clikit/handler/help/help_handler.py,sha256=oILYXTH4H8GQUyTCCdL27V1X9BVyYbsgduuXP_HrRps,238 +clikit/handler/help/help_text_handler.py,sha256=50fAdymanRcJG-QKuLK1Wu1pryYjB0zYTHgohrveHi0,862 +clikit/io/__init__.py,sha256=CFfM1isCq0Ox3jcgKy_ti36LjEIKb-WCqQLV4Ek5hP4,98 +clikit/io/__pycache__/__init__.cpython-38.pyc,, +clikit/io/__pycache__/buffered_io.cpython-38.pyc,, +clikit/io/__pycache__/console_io.cpython-38.pyc,, +clikit/io/__pycache__/null_io.cpython-38.pyc,, +clikit/io/buffered_io.py,sha256=ONUdINgS6TdAdUZm58kiQre89qrV4f0caQbYszLM-Oo,1662 +clikit/io/console_io.py,sha256=YLq0LiTZkJMCaNvY5zfFrHaUaNclCUovi2ru-I-LQig,1605 +clikit/io/input_stream/__init__.py,sha256=vn5CqtOFVFzTFufMZmqE9JR2DMQfv7kEUxJ3EIOKnSQ,204 +clikit/io/input_stream/__pycache__/__init__.cpython-38.pyc,, +clikit/io/input_stream/__pycache__/null_input_stream.cpython-38.pyc,, +clikit/io/input_stream/__pycache__/standard_input_stream.cpython-38.pyc,, +clikit/io/input_stream/__pycache__/stream_input_stream.cpython-38.pyc,, +clikit/io/input_stream/__pycache__/string_input_stream.cpython-38.pyc,, +clikit/io/input_stream/null_input_stream.py,sha256=oX5Q-RXznMBaIGYMIdWXJ_7MKUDlTK_iHCsrpwfWcec,717 +clikit/io/input_stream/standard_input_stream.py,sha256=Q1EusqAASNZBNkqj8KlQ4R-zEzjtZ6uhFokqv1BNhs4,289 +clikit/io/input_stream/stream_input_stream.py,sha256=HRwmNsi6rQ64jniCA4REkabKPfrN_PlB0cFyDxSOakM,1429 +clikit/io/input_stream/string_input_stream.py,sha256=1-5Zcb2d9LWFeERQlcFybNXhG8UFGtXtDoh1LzgIzcQ,879 +clikit/io/null_io.py,sha256=3EeBEC1kDgUeE6J2qa1bjHQplkqjwCTlgho1ZLvnZ9w,494 +clikit/io/output_stream/__init__.py,sha256=ZNqXJn6KLAB58ZPw24wHz0jOVgvfoluHa6WwwKBivBY,267 +clikit/io/output_stream/__pycache__/__init__.cpython-38.pyc,, +clikit/io/output_stream/__pycache__/buffered_output_stream.cpython-38.pyc,, +clikit/io/output_stream/__pycache__/error_output_stream.cpython-38.pyc,, +clikit/io/output_stream/__pycache__/null_output_stream.cpython-38.pyc,, +clikit/io/output_stream/__pycache__/standard_output_stream.cpython-38.pyc,, +clikit/io/output_stream/__pycache__/stream_output_stream.cpython-38.pyc,, +clikit/io/output_stream/buffered_output_stream.py,sha256=U9L9epHTfAj3Zxwsw0lEwKFoTBuSnMNuU_TDjSf_vZc,1315 +clikit/io/output_stream/error_output_stream.py,sha256=yyoW9GX3vKDawDQt5PnXyvrvY8_18nP5jKXYvsnnuiQ,287 +clikit/io/output_stream/null_output_stream.py,sha256=GF_2uHunXMQXvdZXSzTwzDHDAanNwGs8eivljK3EiMM,799 +clikit/io/output_stream/standard_output_stream.py,sha256=IzKfjNrH3yMokxQOwWNrpUynuNPbaFjlJqT0enNsU1w,296 +clikit/io/output_stream/stream_output_stream.py,sha256=3vig_hyp0dODsoQkzq4fojSG3wlmieWV_zamCn2TX-U,3314 +clikit/resolver/__init__.py,sha256=Px1AAiadk2G-y-zaZzxQQGOZn3EPTnVh7s9iDn4riyg,46 +clikit/resolver/__pycache__/__init__.cpython-38.pyc,, +clikit/resolver/__pycache__/default_resolver.cpython-38.pyc,, +clikit/resolver/__pycache__/help_resolver.cpython-38.pyc,, +clikit/resolver/__pycache__/resolve_result.cpython-38.pyc,, +clikit/resolver/default_resolver.py,sha256=qrRq3EuP3caXg5wcVz6rlzs-JWqMYeqsdXvPNF29nCU,5684 +clikit/resolver/help_resolver.py,sha256=_A78t21jZLsHYDFDict0-sgha_V_sBS9tzie8kBMHVc,1036 
+clikit/resolver/resolve_result.py,sha256=DiSzIMeo01-rzfaZ1ARKk9HQuLm8CkKuYqSeoCDr28A,1394 +clikit/ui/__init__.py,sha256=60I_pzuR0mkk9uBc_oLK78uo1VLcSnqk55HU8muONag,66 +clikit/ui/__pycache__/__init__.cpython-38.pyc,, +clikit/ui/__pycache__/component.cpython-38.pyc,, +clikit/ui/__pycache__/rectangle.cpython-38.pyc,, +clikit/ui/alignment/__init__.py,sha256=z6aQyThJze0J1Pk0mNXCVlW8Ipk52n0rfviV4EHSA5w,44 +clikit/ui/alignment/__pycache__/__init__.cpython-38.pyc,, +clikit/ui/alignment/__pycache__/label_alignment.cpython-38.pyc,, +clikit/ui/alignment/label_alignment.py,sha256=W2Gnw2FNJHkrIeH2pcJYrPlI5FG2xnIRuYL5zNNCMEg,1212 +clikit/ui/component.py,sha256=6kXOcriR5JawgA4XRHaJ0Ue-Klj6MeI0V0GlK2JyBCM,229 +clikit/ui/components/__init__.py,sha256=tPsp8TeueoiQHeCPlw6ridPJ5WS5lLwk4iNhf3Zu58U,515 +clikit/ui/components/__pycache__/__init__.cpython-38.pyc,, +clikit/ui/components/__pycache__/border_util.cpython-38.pyc,, +clikit/ui/components/__pycache__/cell_wrapper.cpython-38.pyc,, +clikit/ui/components/__pycache__/choice_question.cpython-38.pyc,, +clikit/ui/components/__pycache__/confirmation_question.cpython-38.pyc,, +clikit/ui/components/__pycache__/empty_line.cpython-38.pyc,, +clikit/ui/components/__pycache__/exception_trace.cpython-38.pyc,, +clikit/ui/components/__pycache__/labeled_paragraph.cpython-38.pyc,, +clikit/ui/components/__pycache__/name_version.cpython-38.pyc,, +clikit/ui/components/__pycache__/paragraph.cpython-38.pyc,, +clikit/ui/components/__pycache__/progress_bar.cpython-38.pyc,, +clikit/ui/components/__pycache__/progress_indicator.cpython-38.pyc,, +clikit/ui/components/__pycache__/question.cpython-38.pyc,, +clikit/ui/components/__pycache__/table.cpython-38.pyc,, +clikit/ui/components/border_util.py,sha256=NqkT5mwJfg9jbxZ4I_IX9X26mMfGSIYWuRZ7RQPGTjI,4840 +clikit/ui/components/cell_wrapper.py,sha256=5OFhcsXYyHoTGDc4oNLhs9-hAzOcNBBDQxjlB4DKFlQ,7243 +clikit/ui/components/choice_question.py,sha256=J1-9YMPlOliKPiFieuvNQdB-XuCEbZao7089QtzaxXE,4148 +clikit/ui/components/confirmation_question.py,sha256=_GJtLWjxSyUvW7gKAbRO9pzjb4KTEjKbJb9dKeJyu9k,992 +clikit/ui/components/empty_line.py,sha256=jGEfFp12wpGRboEPr_BxEpust_vYEoK6kD3eseG9RY8,218 +clikit/ui/components/exception_trace.py,sha256=GUCWdyF9gAVm8yCxKZsiRmypAUG1ef0_bt_YBvE04To,15913 +clikit/ui/components/labeled_paragraph.py,sha256=gPO8eqKokwF2UN4zwC8l0QAKN3spdd631W6niboEJO8,1990 +clikit/ui/components/name_version.py,sha256=Wj1h0V5YlWDaLIgld5HOurBJMDObbK2lh-S4lA1MoH8,880 +clikit/ui/components/paragraph.py,sha256=XD4CFVWVtisRUQil8BrgScvJ6boqvDxH85m0AHTUsZs,688 +clikit/ui/components/progress_bar.py,sha256=C8xEIOrtvH2w38f1va1BKIj5Or--C_3bS7S7Kn3XWDc,12664 +clikit/ui/components/progress_indicator.py,sha256=FIJ78hbtirvAUZKmCchM0LQaNlPBdwxkBSMM6Cc9VLA,5470 +clikit/ui/components/question.py,sha256=zRZzdtkHrorbZ_34bJyov1nYhwkWMKREfiFrxut9MGo,7607 +clikit/ui/components/table.py,sha256=jn77Mj6Ur6YOmZltaBq772ou2slmRUqP-CvtZn0B5Rw,5232 +clikit/ui/help/__init__.py,sha256=PE4DRHjZpUNQ_oVZtDgYkkg4ryT3zJgPzpkVnFVR8Xg,84 +clikit/ui/help/__pycache__/__init__.cpython-38.pyc,, +clikit/ui/help/__pycache__/abstract_help.cpython-38.pyc,, +clikit/ui/help/__pycache__/application_help.cpython-38.pyc,, +clikit/ui/help/__pycache__/command_help.cpython-38.pyc,, +clikit/ui/help/abstract_help.py,sha256=61pjHbV0z-ajdRqi5bDXmeIoduhIuBY5Q92TghL8MzU,5482 +clikit/ui/help/application_help.py,sha256=gC4XiwLKuCZSZqvkVfHF11QUtCtlVSR3iyCFXHfRXxg,3437 +clikit/ui/help/command_help.py,sha256=XTcqms6r-6MJRxfpwZxQrBidb8wAgK8FFVU2nyUV1GU,6056 
+clikit/ui/layout/__init__.py,sha256=FwOogbjZsOLTotD0PlxYNIZsuzki29Yxy21VtZm8Hks,38 +clikit/ui/layout/__pycache__/__init__.cpython-38.pyc,, +clikit/ui/layout/__pycache__/block_layout.cpython-38.pyc,, +clikit/ui/layout/block_layout.py,sha256=QC-w2bR5cSghF89GlzE6Mcw4UMwynS1ShiafwS6At9w,1262 +clikit/ui/rectangle.py,sha256=N9a9cGix0dlf-4jsGm66EgIjTctSnZXHNxv5c_yYniY,89 +clikit/ui/style/__init__.py,sha256=sWYW20D0opwjM8zyqxeIgXQFdlx6YU9yhjHiILPtLVw,69 +clikit/ui/style/__pycache__/__init__.cpython-38.pyc,, +clikit/ui/style/__pycache__/alignment.cpython-38.pyc,, +clikit/ui/style/__pycache__/border_style.cpython-38.pyc,, +clikit/ui/style/__pycache__/table_style.cpython-38.pyc,, +clikit/ui/style/alignment.py,sha256=-BWCOyYJ4fiL1P8PCpUVnZarp7iMCfpOj11nQ05Z5LY,110 +clikit/ui/style/border_style.py,sha256=TSwHfFMVfpJHkWJaKW0n8PzBDOHZRP8QZ3SlRRomXp4,2540 +clikit/ui/style/table_style.py,sha256=rwLbwkBUyKAenLt2jcv5gWEsVmNZJ4QMGfasRiH3FVg,2303 +clikit/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +clikit/utils/__pycache__/__init__.cpython-38.pyc,, +clikit/utils/__pycache__/_compat.cpython-38.pyc,, +clikit/utils/__pycache__/command.cpython-38.pyc,, +clikit/utils/__pycache__/string.cpython-38.pyc,, +clikit/utils/__pycache__/terminal.cpython-38.pyc,, +clikit/utils/__pycache__/time.cpython-38.pyc,, +clikit/utils/_compat.py,sha256=E2hkXMnWifngCqoOgd5V3NeP3qhQFytPfTkfZwQehP0,1814 +clikit/utils/command.py,sha256=KzWMqmV5KO4Up_e9duIOT7F9AqJB1QGOxxWfwboXxlA,1296 +clikit/utils/string.py,sha256=jaoiWhqO5vRax2jgyO6YvEBgHUIDH7NIhPGcjTvaCiI,2442 +clikit/utils/terminal.py,sha256=VhnUJBuIYtuBQiQGSTgTOLSBaXJ1anepxF0uh_AoElE,3696 +clikit/utils/time.py,sha256=fJrFL3pNwn1DUPF0ML_GuL9Fw27eHKt9szwA5EnEeQY,476 diff --git a/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/WHEEL new file mode 120000 index 0000000..056e3c1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit-0.6.2.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/05/c1/f0/5fbe4be68f72f0d37de0153e3bf20fffb00ffafae1f8ce91586a1d79dd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/__init__.py b/venv/lib/python3.8/site-packages/clikit/__init__.py new file mode 120000 index 0000000..c59fedc --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/eb/bd/b2/6b6c43bbf8241df3c5916f288786004e6cb0bacd240cefc7e7e22fb02c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..cf07064 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/__pycache__/console_application.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/__pycache__/console_application.cpython-38.pyc new file mode 100644 index 0000000..c3d4de2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/__pycache__/console_application.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/adapter/__init__.py b/venv/lib/python3.8/site-packages/clikit/adapter/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/adapter/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 
\ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/adapter/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/adapter/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d8d7350 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/adapter/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/adapter/__pycache__/style_converter.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/adapter/__pycache__/style_converter.cpython-38.pyc new file mode 100644 index 0000000..35478a8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/adapter/__pycache__/style_converter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/adapter/style_converter.py b/venv/lib/python3.8/site-packages/clikit/adapter/style_converter.py new file mode 120000 index 0000000..5dc2500 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/adapter/style_converter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8d/d6/ae/2af70d09a485fcb6ada1a3bde36c14d344a1db8698fbbe21ab8204711f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4e8cbad Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..1d7ef37 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/application/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/application/__init__.py new file mode 120000 index 0000000..9c49375 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/application/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/16/83/9c/fc04bb422c9d32e0a89f8e13e1988e0b22048ebdbb707e6e4fad7cf8e1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/application/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/application/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..b764e71 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/application/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/application/__pycache__/application.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/application/__pycache__/application.cpython-38.pyc new file mode 100644 index 0000000..c15a0b1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/application/__pycache__/application.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/application/application.py 
b/venv/lib/python3.8/site-packages/clikit/api/application/application.py new file mode 120000 index 0000000..908cfff --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/application/application.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4e/fd/8c/7bc1d67f8e142e5ee4879f6a72dc2f9ebfabaf59cfe1fa3e4b95db47af \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/args/__init__.py new file mode 120000 index 0000000..2a4dc39 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b1/94/12/23f582e2d72ba6ab2ec6711c018792d596dc393c56258808f6c85e4dcb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3e26e15 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/args.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/args.cpython-38.pyc new file mode 100644 index 0000000..c9e36cc Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/args.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/args_parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/args_parser.cpython-38.pyc new file mode 100644 index 0000000..f9256ee Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/args_parser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..a9e1883 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/raw_args.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/raw_args.cpython-38.pyc new file mode 100644 index 0000000..18b381f Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/__pycache__/raw_args.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/args.py b/venv/lib/python3.8/site-packages/clikit/api/args/args.py new file mode 120000 index 0000000..eb2fe4e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/args.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/dc/e4/39/9ba7567d28664316cdab0800260f60b31db474048c7784bb0bf2053f96 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/args_parser.py b/venv/lib/python3.8/site-packages/clikit/api/args/args_parser.py new file mode 120000 index 0000000..70a8fb3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/args_parser.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/72/17/5d/97ee6e7fcde8ade6a486eb0558015666cb6e4e075321bb2558763f4353 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/exceptions.py b/venv/lib/python3.8/site-packages/clikit/api/args/exceptions.py new file mode 120000 index 0000000..10304b9 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/clikit/api/args/exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/15/24/63/b194091dfd848430cb4191b77a1cbb2a70d56271631a00cd4ef8a56590 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/__init__.py new file mode 120000 index 0000000..7f453e2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/00/63/ca951cebe38e78e5f165c6afbb997b42efce765a8a72eb131876472191 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..32b9efc Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/abstract_option.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/abstract_option.cpython-38.pyc new file mode 100644 index 0000000..7bf42cd Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/abstract_option.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/args_format.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/args_format.cpython-38.pyc new file mode 100644 index 0000000..bf840f7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/args_format.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/args_format_builder.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/args_format_builder.cpython-38.pyc new file mode 100644 index 0000000..3a98508 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/args_format_builder.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/argument.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/argument.cpython-38.pyc new file mode 100644 index 0000000..a08d7f3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/argument.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/command_name.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/command_name.cpython-38.pyc new file mode 100644 index 0000000..336522f Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/command_name.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/command_option.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/command_option.cpython-38.pyc new file mode 100644 index 0000000..4b278b0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/command_option.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/option.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/option.cpython-38.pyc new file mode 100644 index 
0000000..7aa8ac5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/args/format/__pycache__/option.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/abstract_option.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/abstract_option.py new file mode 120000 index 0000000..345497b --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/abstract_option.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/3a/99/edd841cbe65028fc8cfa2c5ffb5764be10f6877e315e1b92da044bb1f7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/args_format.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/args_format.py new file mode 120000 index 0000000..f0a96b0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/args_format.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/75/ad/1d/1bd43bf6dfbf8fe9bf2e84e13c0ef0e4722f8d8b39e46beb9f69614367 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/args_format_builder.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/args_format_builder.py new file mode 120000 index 0000000..82e36cd --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/args_format_builder.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/55/a2/65/efd9c7a5d9eacfa7e6e9e266b8200936d0648c328df1b581abcf65290c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/argument.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/argument.py new file mode 120000 index 0000000..b88cd34 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/argument.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/98/c6/29/63ef9e26a77908d4291eb2c63c5cd5a213155dc83bf829d0b6ed9147f1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/command_name.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/command_name.py new file mode 120000 index 0000000..5e0dbb0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/command_name.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/47/99/87/bae749d2c2b2e3ba9504ca50848d9c05a0dda93bef013b16460250a35e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/command_option.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/command_option.py new file mode 120000 index 0000000..81893f2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/command_option.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b4/13/11/226d9e01512960387f18f9b20db84aabdc698ef9fb83c6e9596cab5ea8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/format/option.py b/venv/lib/python3.8/site-packages/clikit/api/args/format/option.py new file mode 120000 index 0000000..74d12cd --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/format/option.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/35/b4/92/1631faba34746b5ebf0197fe993ddd7189e8cc65b97403627681223430 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/args/raw_args.py b/venv/lib/python3.8/site-packages/clikit/api/args/raw_args.py new file mode 120000 index 0000000..2738d93 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/args/raw_args.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/96/6b/49/1f24b26e2ccb7a445b961c9f8c0a4e1a0e7475ad86950734121d36c877 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/command/__init__.py new file mode 120000 index 0000000..da9f222 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/command/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d3/bf/75/31949664cd21368229e78260184a25c91c5956f6ebb21126ac69de01ba \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..f913477 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/command.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/command.cpython-38.pyc new file mode 100644 index 0000000..0604bc8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/command_collection.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/command_collection.cpython-38.pyc new file mode 100644 index 0000000..8c152f1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/command_collection.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..929ff39 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/command/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/command.py b/venv/lib/python3.8/site-packages/clikit/api/command/command.py new file mode 120000 index 0000000..9c4e234 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/command/command.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9f/80/b2/21c2a9034dad2febbf85a823379350cb7b5b1871a224b1aefd52f135a8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/command_collection.py b/venv/lib/python3.8/site-packages/clikit/api/command/command_collection.py new file mode 120000 index 0000000..9cef867 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/command/command_collection.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0e/03/a2/8516ce170f58c40a340c994a5cb76273f276d7ad1ea824422b51c9e45c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/command/exceptions.py b/venv/lib/python3.8/site-packages/clikit/api/command/exceptions.py new file mode 120000 index 0000000..48229c3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/command/exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/72/ea/fe/71a2a459decf90a80e8cbef6e42c2b3b89541973ad135ce8d6fc76ba06 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/config/__init__.py new file mode 120000 index 0000000..75127bf --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/config/__init__.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/7b/23/c8/2829722f196c6e60067d1026513d599e7a8d9a25cd61df2630cb599f7d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..187be99 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/application_config.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/application_config.cpython-38.pyc new file mode 100644 index 0000000..0398570 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/application_config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/command_config.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/command_config.cpython-38.pyc new file mode 100644 index 0000000..d313d2a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/command_config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/config.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/config.cpython-38.pyc new file mode 100644 index 0000000..8384df1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/config/__pycache__/config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/application_config.py b/venv/lib/python3.8/site-packages/clikit/api/config/application_config.py new file mode 120000 index 0000000..6de007e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/config/application_config.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/37/f2/c3/51e6166c1bff18d36ebf55b81a8bc119418242f4fe8d635baf44d4af61 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/command_config.py b/venv/lib/python3.8/site-packages/clikit/api/config/command_config.py new file mode 120000 index 0000000..c250ef6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/config/command_config.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f6/13/ba/9655155f4c76cf5dc4900241d3a5c9aee641cdb88403d089a3c2bb91a0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/config/config.py b/venv/lib/python3.8/site-packages/clikit/api/config/config.py new file mode 120000 index 0000000..ed7e3c9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/config/config.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/da/27/eb/bc0609ed79d48f6fa198ff1acc3f2d8c6be8a0f9d11da759ca60731c99 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/event/__init__.py new file mode 120000 index 0000000..310db16 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/88/ae/38/abdf43263ea3237f854936e0f3b64a48340aa71b997e38436b3cc49479 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..fbe5260 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/config_event.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/config_event.cpython-38.pyc new file mode 100644 index 0000000..c16b6b9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/config_event.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/console_events.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/console_events.cpython-38.pyc new file mode 100644 index 0000000..dde5016 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/console_events.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/event.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/event.cpython-38.pyc new file mode 100644 index 0000000..8d1c097 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/event.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/event_dispatcher.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/event_dispatcher.cpython-38.pyc new file mode 100644 index 0000000..705c3cd Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/event_dispatcher.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/pre_handle_event.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/pre_handle_event.cpython-38.pyc new file mode 100644 index 0000000..9e80097 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/pre_handle_event.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/pre_resolve_event.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/pre_resolve_event.cpython-38.pyc new file mode 100644 index 0000000..59f178b Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/event/__pycache__/pre_resolve_event.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/config_event.py b/venv/lib/python3.8/site-packages/clikit/api/event/config_event.py new file mode 120000 index 0000000..4f3e129 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/config_event.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/81/05/55/beb46e798bd5d33dce402351ec5862f383208e30555b4ce7e876bb5152 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/console_events.py b/venv/lib/python3.8/site-packages/clikit/api/event/console_events.py new file mode 120000 index 0000000..6271d9c --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/console_events.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/44/72/bc/9ff046659deda1906baa06841a4d5683e74bcfbeb4875edd835b8eedb7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/event.py b/venv/lib/python3.8/site-packages/clikit/api/event/event.py new file mode 120000 index 0000000..b9cde9d --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/event.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a4/93/e0/25eb32839257f0f3c0dbece2e641d493701b0d75e00b221806e1ad266a \ No newline at end of file diff 
--git a/venv/lib/python3.8/site-packages/clikit/api/event/event_dispatcher.py b/venv/lib/python3.8/site-packages/clikit/api/event/event_dispatcher.py new file mode 120000 index 0000000..dd3eb78 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/event_dispatcher.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/45/9b/d0/d418988f897771be47d1cae7e98453e7f4ffa0e3bc717ab4a8421bbac8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/pre_handle_event.py b/venv/lib/python3.8/site-packages/clikit/api/event/pre_handle_event.py new file mode 120000 index 0000000..453d5d7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/pre_handle_event.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/12/81/48/ba1d48fd1d13e6f17b8ded1d0d51bda54e4f60d84d6631ec00e3dd7045 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/event/pre_resolve_event.py b/venv/lib/python3.8/site-packages/clikit/api/event/pre_resolve_event.py new file mode 120000 index 0000000..0e5f381 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/event/pre_resolve_event.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/be/ce/8b/52cd18ab5a4928272a8f0ed7db171fea08a9ef90505e92ad318c2efeef \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/exceptions.py b/venv/lib/python3.8/site-packages/clikit/api/exceptions.py new file mode 120000 index 0000000..d815e32 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bd/60/49/aa1617345060838ac820a374ee13e2038d879afa944b3d63f6cd4dea80 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/formatter/__init__.py new file mode 120000 index 0000000..029659a --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/formatter/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ab/25/35/fd22b5d55463622c2dcb85932bd8395ed7eda531cc3c6ed850eb265bff \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..8a2d789 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/formatter.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/formatter.cpython-38.pyc new file mode 100644 index 0000000..22cc576 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/formatter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/style.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/style.cpython-38.pyc new file mode 100644 index 0000000..aa92685 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/style.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/style_set.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/style_set.cpython-38.pyc new file mode 100644 index 0000000..e8e187e Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/formatter/__pycache__/style_set.cpython-38.pyc differ 
diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/formatter.py b/venv/lib/python3.8/site-packages/clikit/api/formatter/formatter.py new file mode 120000 index 0000000..320dc01 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/formatter/formatter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/99/6a/6b/205b073ba0d8174beb03c1fb1400abf869c4aee4cbb18f98d56f02ce0b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/style.py b/venv/lib/python3.8/site-packages/clikit/api/formatter/style.py new file mode 120000 index 0000000..90462bd --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/formatter/style.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/96/6b/e5/da211cdba21640ef9b852e49253f867a6b759a4306ff17d00f699cbc2d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/formatter/style_set.py b/venv/lib/python3.8/site-packages/clikit/api/formatter/style_set.py new file mode 120000 index 0000000..e32ec23 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/formatter/style_set.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f4/c2/4e/fae69d9206d490cb71cb29e5529be1f7ef437b312e114d023768a0fa7a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/io/__init__.py new file mode 120000 index 0000000..6c13b1f --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c0/53/bc/90563a68d1743727151b80187b927f0392ed16ff0eea2bc5409264f925 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..8d6183b Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/flags.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/flags.cpython-38.pyc new file mode 100644 index 0000000..20e0dc7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/flags.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/input.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/input.cpython-38.pyc new file mode 100644 index 0000000..58bea3a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/input.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/input_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/input_stream.cpython-38.pyc new file mode 100644 index 0000000..1754a32 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/input_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/io.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/io.cpython-38.pyc new file mode 100644 index 0000000..cf2e680 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/io_exception.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/io_exception.cpython-38.pyc new file mode 100644 
index 0000000..92aec2a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/io_exception.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/output.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/output.cpython-38.pyc new file mode 100644 index 0000000..bdb217b Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/output.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/output_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/output_stream.cpython-38.pyc new file mode 100644 index 0000000..695f9f5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/output_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/section_output.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/section_output.cpython-38.pyc new file mode 100644 index 0000000..0479f43 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/io/__pycache__/section_output.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/flags.py b/venv/lib/python3.8/site-packages/clikit/api/io/flags.py new file mode 120000 index 0000000..178e08e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/flags.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/07/15/6d/02a89bd892abc5af1e9ca6fb8e03c7adba304b7261d70a66335531c1ef \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/input.py b/venv/lib/python3.8/site-packages/clikit/api/io/input.py new file mode 120000 index 0000000..b20810d --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/input.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4e/62/a5/6050c823deac1f7a0cffb240627eb1ee2e0cabc8a646b388b55726ccb3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/input_stream.py b/venv/lib/python3.8/site-packages/clikit/api/io/input_stream.py new file mode 120000 index 0000000..5ea1e95 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/input_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bc/fa/e7/0afcfb0d8b9b36a502b7a820cd48125daf8fa068bc89f28a71a0deb2dd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/io.py b/venv/lib/python3.8/site-packages/clikit/api/io/io.py new file mode 120000 index 0000000..7cab7fa --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/io.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fe/dd/6a/337b16241ea7932d6cefb7e5ed1f19aaadaf484808e87a3075627d8c7d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/io_exception.py b/venv/lib/python3.8/site-packages/clikit/api/io/io_exception.py new file mode 120000 index 0000000..5b63562 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/io_exception.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/db/7b/40/84391380ca59c370151c042dc03b0bc6b31c4849850ea8408f4cda5353 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/output.py b/venv/lib/python3.8/site-packages/clikit/api/io/output.py new file mode 120000 index 0000000..bb00774 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/output.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c2/ea/8a/309d27b206b906bc748b5ad2b12c2ab6d9e460dadb491a97edfb3b1a3c \ No newline at end 
of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/output_stream.py b/venv/lib/python3.8/site-packages/clikit/api/io/output_stream.py new file mode 120000 index 0000000..d37388e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/output_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/df/b4/bc/bdcae587518983058a374e70dcdf54b359af0e3fae26e35f0de483d54d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/io/section_output.py b/venv/lib/python3.8/site-packages/clikit/api/io/section_output.py new file mode 120000 index 0000000..9d75b0f --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/io/section_output.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b1/18/fa/1d14821e6e91ac8fe6e14103226b210433b8e449e24576e141e7a45416 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/__init__.py b/venv/lib/python3.8/site-packages/clikit/api/resolver/__init__.py new file mode 120000 index 0000000..cf310a9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/resolver/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f0/96/ad/bbcee01b45f35bd19ceaef96e1359bb6c9c326fb1373f7e2b5b2182700 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d4a249f Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/command_resolver.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/command_resolver.cpython-38.pyc new file mode 100644 index 0000000..e4c07af Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/command_resolver.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..1266701 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/resolved_command.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/resolved_command.cpython-38.pyc new file mode 100644 index 0000000..c40e29a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/api/resolver/__pycache__/resolved_command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/command_resolver.py b/venv/lib/python3.8/site-packages/clikit/api/resolver/command_resolver.py new file mode 120000 index 0000000..a407250 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/resolver/command_resolver.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b4/9c/d7/17d0a60a462e1e67aadeb089f38f200dc399c04f9ea8250600a515e258 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/exceptions.py b/venv/lib/python3.8/site-packages/clikit/api/resolver/exceptions.py new file mode 120000 index 0000000..9a08689 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/resolver/exceptions.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/b3/9b/32/860684e0ab1a73205f3e3df02c2048a92a7841d61fa93d366ed67ca37e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/api/resolver/resolved_command.py b/venv/lib/python3.8/site-packages/clikit/api/resolver/resolved_command.py new file mode 120000 index 0000000..8774802 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/api/resolver/resolved_command.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/99/2d/2c/1d443e3fd04c46fe9e3fe882bdfb1d39e1156ea982b81f858adb08a69b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/args/__init__.py b/venv/lib/python3.8/site-packages/clikit/args/__init__.py new file mode 120000 index 0000000..ebeb246 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/args/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/62/e0/cb/31ba430534515fc46c0c6170aa9f508bcdcfd31fd4dacf69010420b91b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/args/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..6022223 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/args/__pycache__/argv_args.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/argv_args.cpython-38.pyc new file mode 100644 index 0000000..429975a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/argv_args.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/args/__pycache__/default_args_parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/default_args_parser.cpython-38.pyc new file mode 100644 index 0000000..680d4d1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/default_args_parser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/args/__pycache__/string_args.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/string_args.cpython-38.pyc new file mode 100644 index 0000000..6e1e8f6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/string_args.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/args/__pycache__/token_parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/token_parser.cpython-38.pyc new file mode 100644 index 0000000..dff76a5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/args/__pycache__/token_parser.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/args/argv_args.py b/venv/lib/python3.8/site-packages/clikit/args/argv_args.py new file mode 120000 index 0000000..2081877 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/args/argv_args.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ec/58/92/721972873e34573b182bbb89b1a48ee170fdc74e5c8630d52ca029a702 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/args/default_args_parser.py b/venv/lib/python3.8/site-packages/clikit/args/default_args_parser.py new file mode 120000 index 0000000..e0cdedc --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/args/default_args_parser.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/07/f1/7d/327dc6f110ba74f44fc4a4f6aa57ecabb564d28f8a56c95739b974a32f \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/clikit/args/inputs/__init__.py b/venv/lib/python3.8/site-packages/clikit/args/inputs/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/args/inputs/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/args/inputs/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/args/inputs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..2e4e381 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/args/inputs/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/args/string_args.py b/venv/lib/python3.8/site-packages/clikit/args/string_args.py new file mode 120000 index 0000000..777fc17 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/args/string_args.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/61/24/27/4fdfca48aaa737089bc09e9584304527d5486f0040f3c88cac81f649c2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/args/token_parser.py b/venv/lib/python3.8/site-packages/clikit/args/token_parser.py new file mode 120000 index 0000000..3366ee4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/args/token_parser.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f0/b5/b9/a84724ea613073081f3c1c30193a35b5fbbcf2ae4fef3343148aef4514 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/config/__init__.py b/venv/lib/python3.8/site-packages/clikit/config/__init__.py new file mode 120000 index 0000000..79ff839 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/config/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/77/19/95/634959d3e1ed7fe2a49bb327ac55b9e3596479e2019d01b5d10e72d0b2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/config/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/config/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..861f55e Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/config/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/config/__pycache__/default_application_config.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/config/__pycache__/default_application_config.cpython-38.pyc new file mode 100644 index 0000000..bfc04ed Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/config/__pycache__/default_application_config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/config/default_application_config.py b/venv/lib/python3.8/site-packages/clikit/config/default_application_config.py new file mode 120000 index 0000000..8f4b736 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/config/default_application_config.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/96/12/ae/668b20db7590f55e07ee7b0da157240955dcd7c135decc4c023a992b9c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/console_application.py b/venv/lib/python3.8/site-packages/clikit/console_application.py new file mode 120000 index 0000000..124984b --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/console_application.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/aa/e5/30/4df0966b8d3652024ba772f29fee9fe2ff0a505c8086c74620c79c41b8 \ No newline at end of 
file diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/__init__.py b/venv/lib/python3.8/site-packages/clikit/formatter/__init__.py new file mode 120000 index 0000000..9c7a51c --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/formatter/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/0a/32/e3e3942bc2fbb949acae99052dd42395102a92d958cb3bb50f9a983262 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e099e4b Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/ansi_formatter.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/ansi_formatter.cpython-38.pyc new file mode 100644 index 0000000..1c2d20e Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/ansi_formatter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/default_style_set.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/default_style_set.cpython-38.pyc new file mode 100644 index 0000000..b96e775 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/default_style_set.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/null_formatter.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/null_formatter.cpython-38.pyc new file mode 100644 index 0000000..5e6fc1a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/null_formatter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/plain_formatter.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/plain_formatter.cpython-38.pyc new file mode 100644 index 0000000..b5c7d2c Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/formatter/__pycache__/plain_formatter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/ansi_formatter.py b/venv/lib/python3.8/site-packages/clikit/formatter/ansi_formatter.py new file mode 120000 index 0000000..100aaab --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/formatter/ansi_formatter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d4/f2/39/2521d6557b9398520cd0b51a1ada5834015b27efdfb82003e124cb17ef \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/default_style_set.py b/venv/lib/python3.8/site-packages/clikit/formatter/default_style_set.py new file mode 120000 index 0000000..ac88f03 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/formatter/default_style_set.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/14/1c/8c/24545d338b014eeb13414337b66f00583d1cf63292492e047452c6c993 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/formatter/null_formatter.py b/venv/lib/python3.8/site-packages/clikit/formatter/null_formatter.py new file mode 120000 index 0000000..879c387 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/formatter/null_formatter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4b/5d/d8/495bb06a8aa9716afec271a0d0372f7a0482ec727e73eb953129d48dbc \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/clikit/formatter/plain_formatter.py b/venv/lib/python3.8/site-packages/clikit/formatter/plain_formatter.py new file mode 120000 index 0000000..9d518d3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/formatter/plain_formatter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/00/51/ad/32f1bfd9b5297ca89aec8f33c44299ccb7769c9954d02e6170695d5c28 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/handler/__init__.py b/venv/lib/python3.8/site-packages/clikit/handler/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/handler/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/handler/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/handler/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..73caf23 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/handler/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/handler/__pycache__/callback_handler.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/handler/__pycache__/callback_handler.cpython-38.pyc new file mode 100644 index 0000000..8153ecd Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/handler/__pycache__/callback_handler.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/handler/callback_handler.py b/venv/lib/python3.8/site-packages/clikit/handler/callback_handler.py new file mode 120000 index 0000000..fa58ec6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/handler/callback_handler.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9a/49/98/ac929b53db04b100577b9c55f95e28e84654cfeefabf2d99e78334f656 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/handler/help/__init__.py b/venv/lib/python3.8/site-packages/clikit/handler/help/__init__.py new file mode 120000 index 0000000..6ea4d87 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/handler/help/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/83/a7/6d/f06896d8dc3a4b6283a4607ce8be07f5f9ef99063885af48a610bd0095 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..ec7db0a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/help_handler.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/help_handler.cpython-38.pyc new file mode 100644 index 0000000..242c6df Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/help_handler.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/help_text_handler.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/help_text_handler.cpython-38.pyc new file mode 100644 index 0000000..87149d4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/handler/help/__pycache__/help_text_handler.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/clikit/handler/help/help_handler.py b/venv/lib/python3.8/site-packages/clikit/handler/help/help_handler.py new file mode 120000 index 0000000..fb7ee61 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/handler/help/help_handler.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a0/82/d8/5d31f81fc1905324c209d2f6ed5d57f4157261bb2076eb973ff1eb469b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/handler/help/help_text_handler.py b/venv/lib/python3.8/site-packages/clikit/handler/help/help_text_handler.py new file mode 120000 index 0000000..f1f174e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/handler/help/help_text_handler.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e7/47/c0/77299a9d17091be40ab8b2b55aed69af2623074cd84c782886bbde1e2d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/__init__.py b/venv/lib/python3.8/site-packages/clikit/io/__init__.py new file mode 120000 index 0000000..8ea32f6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/08/57/cc/d62b02ab43b1de37202b2fed8b7e8b8c420a6fe582a902d5e0493984fe \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..47c792a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/__pycache__/buffered_io.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/buffered_io.cpython-38.pyc new file mode 100644 index 0000000..812c855 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/buffered_io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/__pycache__/console_io.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/console_io.cpython-38.pyc new file mode 100644 index 0000000..49d94e7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/console_io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/__pycache__/null_io.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/null_io.cpython-38.pyc new file mode 100644 index 0000000..025b16d Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/__pycache__/null_io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/buffered_io.py b/venv/lib/python3.8/site-packages/clikit/io/buffered_io.py new file mode 120000 index 0000000..970dc51 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/buffered_io.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/38/d5/1d/20d812e93740754666e7c92242b7bcf6aad5e1fd1c6906d8b332ccf8ea \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/console_io.py b/venv/lib/python3.8/site-packages/clikit/io/console_io.py new file mode 120000 index 0000000..eacafa5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/console_io.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/60/ba/b4/2e24d990930268dbd8e737c5ac769468d725094a2f8b6aeef88f8b4228 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/__init__.py b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__init__.py new file mode 120000 index 
0000000..858b3e1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/be/7e/42/aad385545cd316e7cc666a84f494760cc41fbfb90453127710838a9d24 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e07df95 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/null_input_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/null_input_stream.cpython-38.pyc new file mode 100644 index 0000000..4c7787c Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/null_input_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/standard_input_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/standard_input_stream.cpython-38.pyc new file mode 100644 index 0000000..edc8559 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/standard_input_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/stream_input_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/stream_input_stream.cpython-38.pyc new file mode 100644 index 0000000..a8e4953 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/stream_input_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/string_input_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/string_input_stream.cpython-38.pyc new file mode 100644 index 0000000..13e35fa Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/input_stream/__pycache__/string_input_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/null_input_stream.py b/venv/lib/python3.8/site-packages/clikit/io/input_stream/null_input_stream.py new file mode 120000 index 0000000..028240e --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/input_stream/null_input_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a1/7e/50/f915f39cc05a20660c21d59727fecc2940e54cafe21c2b2ba707d671e7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/standard_input_stream.py b/venv/lib/python3.8/site-packages/clikit/io/input_stream/standard_input_stream.py new file mode 120000 index 0000000..f57ca71 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/input_stream/standard_input_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/43/51/2e/b2a00048d641364aa3f0a950e11fb31338ed67aba116892abf504d86ce \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/stream_input_stream.py b/venv/lib/python3.8/site-packages/clikit/io/input_stream/stream_input_stream.py new file mode 120000 index 0000000..3c205ed --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/input_stream/stream_input_stream.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/1d/1c/26/36c8baad0eb88e788203844491a6ca3dfacdfcf941d1c1720f148e6a43 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/input_stream/string_input_stream.py b/venv/lib/python3.8/site-packages/clikit/io/input_stream/string_input_stream.py new file mode 120000 index 0000000..da56177 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/input_stream/string_input_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d7/ee/59/71bd9df4b58578445095c1726cd5e11bc5051ad5ed0e88752f3808cdc4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/null_io.py b/venv/lib/python3.8/site-packages/clikit/io/null_io.py new file mode 120000 index 0000000..3e79a2a --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/null_io.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/dc/47/81/102d640e051e13a276a9ad5b8c7429964aa3c024e5821a3564bbe767dc \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__init__.py b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__init__.py new file mode 120000 index 0000000..51d2080 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/64/da/97/267e8a2c0079f193f0db8c07cf48ce560bdfa25b876ba5b0c0a062bc16 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..06882dd Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/buffered_output_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/buffered_output_stream.cpython-38.pyc new file mode 100644 index 0000000..5fbf0bb Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/buffered_output_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/error_output_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/error_output_stream.cpython-38.pyc new file mode 100644 index 0000000..c1559d2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/error_output_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/null_output_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/null_output_stream.cpython-38.pyc new file mode 100644 index 0000000..39d0a01 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/null_output_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/standard_output_stream.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/standard_output_stream.cpython-38.pyc new file mode 100644 index 0000000..fc5c176 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/standard_output_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/stream_output_stream.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/stream_output_stream.cpython-38.pyc new file mode 100644 index 0000000..6ae6f73 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/io/output_stream/__pycache__/stream_output_stream.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/buffered_output_stream.py b/venv/lib/python3.8/site-packages/clikit/io/output_stream/buffered_output_stream.py new file mode 120000 index 0000000..a3893d6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/output_stream/buffered_output_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/53/d2/fd/7a91d37c08f7671c2cc34944c0a1684c1b929cc36e53f4c38d27ffbd97 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/error_output_stream.py b/venv/lib/python3.8/site-packages/clikit/io/output_stream/error_output_stream.py new file mode 120000 index 0000000..ad74403 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/output_stream/error_output_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cb/2a/16/f465f7bca0dac0342de4f9d7cafaef63cff5f273f98ca5d8bec9e7ba24 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/null_output_stream.py b/venv/lib/python3.8/site-packages/clikit/io/output_stream/null_output_stream.py new file mode 120000 index 0000000..ad522c0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/output_stream/null_output_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/18/5f/f6/b87ba75cc417bdd6574b34f0cc31c301a9cdc06b3c7a2be58cadc488c3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/standard_output_stream.py b/venv/lib/python3.8/site-packages/clikit/io/output_stream/standard_output_stream.py new file mode 120000 index 0000000..289d82b --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/output_stream/standard_output_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/23/32/9f/8cdac7df232893140ec1636ba54ca7b8d3db6858e526a4f47a736c535c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/io/output_stream/stream_output_stream.py b/venv/lib/python3.8/site-packages/clikit/io/output_stream/stream_output_stream.py new file mode 120000 index 0000000..fe40681 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/io/output_stream/stream_output_stream.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/de/f8/a0/fe1ca9d1d383b28424ceae1fa23486df096689e595ff36a60a7d935fe5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/__init__.py b/venv/lib/python3.8/site-packages/clikit/resolver/__init__.py new file mode 120000 index 0000000..f81df64 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/resolver/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3f/1d/40/02269d9361becbecda673c504063999f710f4e7561eecf620e7e2b8b28 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a9b1c92 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/default_resolver.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/default_resolver.cpython-38.pyc new file mode 100644 index 0000000..545e85a Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/default_resolver.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/help_resolver.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/help_resolver.cpython-38.pyc new file mode 100644 index 0000000..faa0a57 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/help_resolver.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/resolve_result.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/resolve_result.cpython-38.pyc new file mode 100644 index 0000000..2899fcc Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/resolver/__pycache__/resolve_result.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/default_resolver.py b/venv/lib/python3.8/site-packages/clikit/resolver/default_resolver.py new file mode 120000 index 0000000..2bc62b8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/resolver/default_resolver.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/aa/b4/6a/dc4b8fddc697839c1c573eab973b3e256a8c61eaac757bcf345dbd9c25 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/help_resolver.py b/venv/lib/python3.8/site-packages/clikit/resolver/help_resolver.py new file mode 120000 index 0000000..ea0ad15 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/resolver/help_resolver.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fc/0e/fc/b76d6364bb0760314389cb74fac8216bf57fb014bdb7389ef2404c1d57 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/resolver/resolve_result.py b/venv/lib/python3.8/site-packages/clikit/resolver/resolve_result.py new file mode 120000 index 0000000..53dfbee --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/resolver/resolve_result.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0e/24/b3/20c7a8d35fabcdf699d4044a93d1d0b8b9bc0a42ae62a49ea020ebdbc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/__init__.py b/venv/lib/python3.8/site-packages/clikit/ui/__init__.py new file mode 120000 index 0000000..5f9225c --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/eb/42/3f/a73b91d26924f6e05cfe82caefcba8d552dc4a7aa4e791d4f26b8e35a8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4acd9db Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/component.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/component.cpython-38.pyc new file mode 100644 index 0000000..bb275d0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/component.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/rectangle.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/rectangle.cpython-38.pyc new file mode 100644 index 0000000..badb7ce Binary files 
/dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/__pycache__/rectangle.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/alignment/__init__.py b/venv/lib/python3.8/site-packages/clikit/ui/alignment/__init__.py new file mode 120000 index 0000000..85a2d76 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/alignment/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/a6/90/c93849cded09d4f93498d5c25655bc229939da7d2b7ef895e041d2039c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/alignment/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/alignment/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4c7d16b Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/alignment/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/alignment/__pycache__/label_alignment.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/alignment/__pycache__/label_alignment.cpython-38.pyc new file mode 100644 index 0000000..da483f1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/alignment/__pycache__/label_alignment.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/alignment/label_alignment.py b/venv/lib/python3.8/site-packages/clikit/ui/alignment/label_alignment.py new file mode 120000 index 0000000..8f542c6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/alignment/label_alignment.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5b/61/a7/c3614d24792b21e1f6a5c258acf948e451b6c67211b982f9ccd3423048 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/component.py b/venv/lib/python3.8/site-packages/clikit/ui/component.py new file mode 120000 index 0000000..3aae429 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/component.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ea/45/ce/72b891e496b0800e17447689d147be2a58fa31e2345741a52b62720423 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__init__.py b/venv/lib/python3.8/site-packages/clikit/ui/components/__init__.py new file mode 120000 index 0000000..4dd6c05 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b4/fb/29/f137ae7a88901de08f970eab89d3c9e564b994bc24e223617f766ee7c5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..ae88ea1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/border_util.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/border_util.cpython-38.pyc new file mode 100644 index 0000000..4f6a6f7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/border_util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/cell_wrapper.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/cell_wrapper.cpython-38.pyc new file mode 100644 index 0000000..279733c Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/cell_wrapper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/choice_question.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/choice_question.cpython-38.pyc new file mode 100644 index 0000000..c294353 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/choice_question.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/confirmation_question.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/confirmation_question.cpython-38.pyc new file mode 100644 index 0000000..aa01b3f Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/confirmation_question.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/empty_line.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/empty_line.cpython-38.pyc new file mode 100644 index 0000000..5220af1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/empty_line.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/exception_trace.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/exception_trace.cpython-38.pyc new file mode 100644 index 0000000..8cb135c Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/exception_trace.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/labeled_paragraph.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/labeled_paragraph.cpython-38.pyc new file mode 100644 index 0000000..11ed505 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/labeled_paragraph.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/name_version.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/name_version.cpython-38.pyc new file mode 100644 index 0000000..2c933f6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/name_version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/paragraph.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/paragraph.cpython-38.pyc new file mode 100644 index 0000000..9dd2b70 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/paragraph.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/progress_bar.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/progress_bar.cpython-38.pyc new file mode 100644 index 0000000..f05b7b6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/progress_bar.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/progress_indicator.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/progress_indicator.cpython-38.pyc new file mode 100644 index 0000000..dcb7712 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/progress_indicator.cpython-38.pyc 
differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/question.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/question.cpython-38.pyc new file mode 100644 index 0000000..70ac8e9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/question.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/table.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/table.cpython-38.pyc new file mode 100644 index 0000000..e5fca84 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/components/__pycache__/table.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/border_util.py b/venv/lib/python3.8/site-packages/clikit/ui/components/border_util.py new file mode 120000 index 0000000..1397f36 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/border_util.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/36/a9/13/e66c097e0f636f167823f217f57dba98c7c6488616b9167b4503c64e32 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/cell_wrapper.py b/venv/lib/python3.8/site-packages/clikit/ui/components/cell_wrapper.py new file mode 120000 index 0000000..8259458 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/cell_wrapper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e4/e1/61/72c5d8c87a13183738a0d2e1b3dfa103339c3410434318e50780ca1654 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/choice_question.py b/venv/lib/python3.8/site-packages/clikit/ui/components/choice_question.py new file mode 120000 index 0000000..f3546d1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/choice_question.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/27/5f/bd/60c3e53a588a3e21627aebcd41d07e5ee0846d96a8ef4f3d42dcdac571 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/confirmation_question.py b/venv/lib/python3.8/site-packages/clikit/ui/components/confirmation_question.py new file mode 120000 index 0000000..b38ca62 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/confirmation_question.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fc/62/6d/2d68f14b252f5bb80a01b44ef69ce36f829312329b25bf5d29e272bbd9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/empty_line.py b/venv/lib/python3.8/site-packages/clikit/ui/components/empty_line.py new file mode 120000 index 0000000..bc98d4b --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/empty_line.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8c/61/1f/169d76c291916e810faff071129bacb7fbd81282ba903ddeb1e1bd458f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/exception_trace.py b/venv/lib/python3.8/site-packages/clikit/ui/components/exception_trace.py new file mode 120000 index 0000000..9740ceb --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/exception_trace.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/19/40/96/77217d800566f320b1299b22466ca90141b579fd3f6edfd806f134e13a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/labeled_paragraph.py b/venv/lib/python3.8/site-packages/clikit/ui/components/labeled_paragraph.py new file mode 120000 index 0000000..ca29eb9 --- 
/dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/labeled_paragraph.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/80/f3/bc/7aa2a893017650de33c02f25d1000a377b2975deb7d56ea789ba0424ef \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/name_version.py b/venv/lib/python3.8/site-packages/clikit/ui/components/name_version.py new file mode 120000 index 0000000..75ece96 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/name_version.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5a/3d/61/d15e589560da2c88257791cebab04930339b6cada587e4b8940d4ca07f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/paragraph.py b/venv/lib/python3.8/site-packages/clikit/ui/components/paragraph.py new file mode 120000 index 0000000..0e47c09 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/paragraph.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5c/3e/02/155595b62b115108a5f01ae049cbc9e9ba2abc3c47f399b40074d4b19b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/progress_bar.py b/venv/lib/python3.8/site-packages/clikit/ui/components/progress_bar.py new file mode 120000 index 0000000..b94407c --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/progress_bar.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0b/cc/44/20eaedbc7db0dfc7f5bdad412888f93abfbe0bfddb4bb4bb2a7dd75837 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/progress_indicator.py b/venv/lib/python3.8/site-packages/clikit/ui/components/progress_indicator.py new file mode 120000 index 0000000..2469220 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/progress_indicator.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/14/82/7b/f216ed8abbc05192a609c84cd0b41a3653c1770c6405230ce8273d54b0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/question.py b/venv/lib/python3.8/site-packages/clikit/ui/components/question.py new file mode 120000 index 0000000..05f4533 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/question.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cd/16/73/76d907ae8adb67fdf86c9ca8bf59d887091630a4447e216bc6eb7d306a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/components/table.py b/venv/lib/python3.8/site-packages/clikit/ui/components/table.py new file mode 120000 index 0000000..58872e3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/components/table.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8e/7e/fb/323e94afa60e99996d681abbef6a2edac966454a8ff82bed667d01e51c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/__init__.py b/venv/lib/python3.8/site-packages/clikit/ui/help/__init__.py new file mode 120000 index 0000000..ac06eb4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/help/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3c/4e/03/4478d9a54350fe8559b43818924838af24f7cc980fce99159c5551f178 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..638dd02 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/__init__.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/abstract_help.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/abstract_help.cpython-38.pyc new file mode 100644 index 0000000..a383a49 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/abstract_help.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/application_help.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/application_help.cpython-38.pyc new file mode 100644 index 0000000..0d3fc9e Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/application_help.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/command_help.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/command_help.cpython-38.pyc new file mode 100644 index 0000000..f92d1b6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/help/__pycache__/command_help.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/abstract_help.py b/venv/lib/python3.8/site-packages/clikit/ui/help/abstract_help.py new file mode 120000 index 0000000..adedca4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/help/abstract_help.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/eb/5a/63/1db574cfe6a3751aa2e5b0d799e22876e848b8163943dd938212fc3335 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/application_help.py b/venv/lib/python3.8/site-packages/clikit/ui/help/application_help.py new file mode 120000 index 0000000..5ba8a04 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/help/application_help.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/80/2e/17/8b02cab8265266abe455f1c5d75414b42b655524778b20855c77d15f18 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/help/command_help.py b/venv/lib/python3.8/site-packages/clikit/ui/help/command_help.py new file mode 120000 index 0000000..20869d5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/help/command_help.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5d/37/2a/9aceabfba3094717e9c19c50ac189d6fcc0080af051555369f2515d465 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/layout/__init__.py b/venv/lib/python3.8/site-packages/clikit/ui/layout/__init__.py new file mode 120000 index 0000000..c50f575 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/layout/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/17/03/a8/81b8d9b0e2d3a2d0f43e5c5834866cbb3922dbd631cb6d55b599bc1e4b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/layout/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/layout/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..f021e6b Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/layout/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/layout/__pycache__/block_layout.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/layout/__pycache__/block_layout.cpython-38.pyc new file mode 100644 index 0000000..3e0ebf6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/layout/__pycache__/block_layout.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/layout/block_layout.py 
b/venv/lib/python3.8/site-packages/clikit/ui/layout/block_layout.py new file mode 120000 index 0000000..d5a76b2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/layout/block_layout.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/40/2f/b0/d9b47971282117cf4697313a31cc3850cc329d2d5286269fc12e80b7dc \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/rectangle.py b/venv/lib/python3.8/site-packages/clikit/ui/rectangle.py new file mode 120000 index 0000000..0c26598 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/rectangle.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/37/d6/bd/7068b1d1d95ffb88ec1a6eba1202234dcb529d95c7371bf973fc989e26 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/__init__.py b/venv/lib/python3.8/site-packages/clikit/ui/style/__init__.py new file mode 120000 index 0000000..e656b13 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/style/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b1/66/16/db40f4a29c2333ccf2ab1788817405765c7a614f728631e220b3ed2d5c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..0d3aee9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/alignment.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/alignment.cpython-38.pyc new file mode 100644 index 0000000..6740c1f Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/alignment.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/border_style.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/border_style.cpython-38.pyc new file mode 100644 index 0000000..722d124 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/border_style.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/table_style.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/table_style.cpython-38.pyc new file mode 100644 index 0000000..e97c97e Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/ui/style/__pycache__/table_style.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/alignment.py b/venv/lib/python3.8/site-packages/clikit/ui/style/alignment.py new file mode 120000 index 0000000..f1a3a04 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/style/alignment.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f8/15/82/3b2609e1f88bd4ff0f0a95159d96aba7b88c09fa4e8f5d67434e59e4b6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/border_style.py b/venv/lib/python3.8/site-packages/clikit/ui/style/border_style.py new file mode 120000 index 0000000..2b473b5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/style/border_style.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4d/2c/07/7c53157e924791625a296d27f0fcc10ce1d944ff106774a5451a265e9e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/ui/style/table_style.py b/venv/lib/python3.8/site-packages/clikit/ui/style/table_style.py new file mode 120000 index 
0000000..41c214a --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/ui/style/table_style.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/af/02/db/c24054c8a01e9cbb768dcbf981612c56635927840c19f6ac4621f71558 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__init__.py b/venv/lib/python3.8/site-packages/clikit/utils/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/utils/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..eb528fa Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/_compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000..457a0c9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/_compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/command.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/command.cpython-38.pyc new file mode 100644 index 0000000..3fd3ce1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/string.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/string.cpython-38.pyc new file mode 100644 index 0000000..dc45e9d Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/string.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/terminal.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/terminal.cpython-38.pyc new file mode 100644 index 0000000..4fe69c3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/terminal.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/time.cpython-38.pyc b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/time.cpython-38.pyc new file mode 100644 index 0000000..f7cb3b1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/clikit/utils/__pycache__/time.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/clikit/utils/_compat.py b/venv/lib/python3.8/site-packages/clikit/utils/_compat.py new file mode 120000 index 0000000..597bb55 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/utils/_compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/13/68/64/5cc9d689f9e00aaa0e81de55dcd78fdea850172b4f7d391f67041e84fd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/utils/command.py b/venv/lib/python3.8/site-packages/clikit/utils/command.py new file mode 120000 index 0000000..fa174b1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/utils/command.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2b/35/8c/aa657928ee14a7f7bd76e20e4fb17d02a241d5018ec7159fc1ba17c650 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/utils/string.py 
b/venv/lib/python3.8/site-packages/clikit/utils/string.py new file mode 120000 index 0000000..73c49db --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/utils/string.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8d/aa/22/5a1a8ee6f45ac768e0c8ee98bc40601d42031fb34884f19c8d3bda0a22 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/utils/terminal.py b/venv/lib/python3.8/site-packages/clikit/utils/terminal.py new file mode 120000 index 0000000..9849334 --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/utils/terminal.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/56/19/d4/241b8862db8142240649381338b4816972756a77a9c45d2e87f0281251 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/clikit/utils/time.py b/venv/lib/python3.8/site-packages/clikit/utils/time.py new file mode 120000 index 0000000..ad275ca --- /dev/null +++ b/venv/lib/python3.8/site-packages/clikit/utils/time.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7c/9a/c5/2f7a4dc27d4350f17430bfc6b8bf45c36ede1cab7db33c00e449c47906 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/LICENSE new file mode 120000 index 0000000..28a136c --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f1/97/81/33782b90f4733bc308ddb19267c3fe04797c88d9ed3bc219032495a982 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/METADATA new file mode 120000 index 0000000..b38f72e --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3d/f3/06/5936fa9ffb64ac74106f72c7fef7e9e55b18bc9542325ff54a6cad7e58 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/RECORD new file mode 100644 index 0000000..83e9a41 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/RECORD @@ -0,0 +1,29 @@ +crashtest-0.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +crashtest-0.3.1.dist-info/LICENSE,sha256=8ZeBM3grkPRzO8MI3bGSZ8P-BHl8iNntO8IZAySVqYI,1062 +crashtest-0.3.1.dist-info/METADATA,sha256=PfMGWTb6n_tkrHQQb3LH_vfp5VsYvJVCMl_1Smytflg,748 +crashtest-0.3.1.dist-info/RECORD,, +crashtest-0.3.1.dist-info/WHEEL,sha256=jM75OLmO0bF2QZxXmlw3A-IE58epFjnoGYhyw7ywvsc,85 +crashtest/__init__.py,sha256=r4xAFihOf72W9TD-lpMi6ntWSTKTP2SlzKP1ytkjRbI,22 +crashtest/__pycache__/__init__.cpython-38.pyc,, +crashtest/__pycache__/frame.cpython-38.pyc,, +crashtest/__pycache__/frame_collection.cpython-38.pyc,, +crashtest/__pycache__/inspector.cpython-38.pyc,, +crashtest/contracts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +crashtest/contracts/__pycache__/__init__.cpython-38.pyc,, +crashtest/contracts/__pycache__/base_solution.cpython-38.pyc,, +crashtest/contracts/__pycache__/has_solutions_for_exception.cpython-38.pyc,, 
+crashtest/contracts/__pycache__/provides_solution.cpython-38.pyc,, +crashtest/contracts/__pycache__/solution.cpython-38.pyc,, +crashtest/contracts/__pycache__/solution_provider_repository.cpython-38.pyc,, +crashtest/contracts/base_solution.py,sha256=XwGYk4seqED9tMmBhED3PdJHFT3qCIbi8i3tyUGqG4Q,517 +crashtest/contracts/has_solutions_for_exception.py,sha256=x6QLIK-n3e-yw4basppwwdwm3I6lzjCJ-_9mHbFbAbU,287 +crashtest/contracts/provides_solution.py,sha256=4KSZJkWAH5dzYDR2h5H6AaXqNXOVja-307BANcEALk0,143 +crashtest/contracts/solution.py,sha256=4Cnq1oTF2VUd0ZIm14SeQmv5rCA99BkwNTiSX-ZfjR0,322 +crashtest/contracts/solution_provider_repository.py,sha256=tZYxED5MRPJmQoVXto6yOhbah0lgwxc7lav9x3JvgzY,556 +crashtest/frame.py,sha256=nr5Mfglwws6iWaLiX5XFoBITWXqQvJDEidkMMpAovJY,2050 +crashtest/frame_collection.py,sha256=x9Kg2zMnNLoiMm4sXschkzpzmGP6dXjjTcuV3O8yh4s,2113 +crashtest/inspector.py,sha256=dPottVpq0KX0xErBqmCP2k_lH7wafP4EXcmXSxSDF7I,1279 +crashtest/solution_providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +crashtest/solution_providers/__pycache__/__init__.cpython-38.pyc,, +crashtest/solution_providers/__pycache__/solution_provider_repository.cpython-38.pyc,, +crashtest/solution_providers/solution_provider_repository.py,sha256=vUonL8oKrC8yF9c_EorwY81lQQQKErQHkwLD6UXDwnc,2165 diff --git a/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/WHEEL new file mode 120000 index 0000000..9613080 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest-0.3.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8c/ce/f9/38b98ed1b176419c579a5c3703e204e7c7a91639e8198872c3bcb0bec7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/__init__.py b/venv/lib/python3.8/site-packages/crashtest/__init__.py new file mode 120000 index 0000000..ed84738 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/af/8c/40/16284e7fbd96f530fe969322ea7b564932933f64a5cca3f5cad92345b2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..780c6ae Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/__pycache__/frame.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/__pycache__/frame.cpython-38.pyc new file mode 100644 index 0000000..a3b25b7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/__pycache__/frame.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/__pycache__/frame_collection.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/__pycache__/frame_collection.cpython-38.pyc new file mode 100644 index 0000000..9b801bf Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/__pycache__/frame_collection.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/__pycache__/inspector.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/__pycache__/inspector.cpython-38.pyc new file mode 100644 index 0000000..030af84 Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/__pycache__/inspector.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__init__.py 
b/venv/lib/python3.8/site-packages/crashtest/contracts/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/contracts/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4927afe Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/base_solution.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/base_solution.cpython-38.pyc new file mode 100644 index 0000000..8693c22 Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/base_solution.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/has_solutions_for_exception.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/has_solutions_for_exception.cpython-38.pyc new file mode 100644 index 0000000..52886a4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/has_solutions_for_exception.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/provides_solution.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/provides_solution.cpython-38.pyc new file mode 100644 index 0000000..ddd1697 Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/provides_solution.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/solution.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/solution.cpython-38.pyc new file mode 100644 index 0000000..3450680 Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/solution.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/solution_provider_repository.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/solution_provider_repository.cpython-38.pyc new file mode 100644 index 0000000..5af978c Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/contracts/__pycache__/solution_provider_repository.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/base_solution.py b/venv/lib/python3.8/site-packages/crashtest/contracts/base_solution.py new file mode 120000 index 0000000..8f98a92 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/contracts/base_solution.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5f/01/98/938b1ea840fdb4c9818440f73dd247153dea0886e2f22dedc941aa1b84 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/has_solutions_for_exception.py b/venv/lib/python3.8/site-packages/crashtest/contracts/has_solutions_for_exception.py new file mode 120000 index 0000000..61b5e39 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/contracts/has_solutions_for_exception.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c7/a4/0b/20afa7ddefb2c386dab29a70c1dc26dc8ea5ce3089fbff661db15b01b5 \ No newline at 
end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/provides_solution.py b/venv/lib/python3.8/site-packages/crashtest/contracts/provides_solution.py new file mode 120000 index 0000000..77dc52f --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/contracts/provides_solution.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e0/a4/99/2645801f97736034768791fa01a5ea3573958dafb7d3b04035c1002e4d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/solution.py b/venv/lib/python3.8/site-packages/crashtest/contracts/solution.py new file mode 120000 index 0000000..db2f721 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/contracts/solution.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e0/29/ea/d684c5d9551dd19226d7849e426bf9ac203df419303538925fe65f8d1d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/contracts/solution_provider_repository.py b/venv/lib/python3.8/site-packages/crashtest/contracts/solution_provider_repository.py new file mode 120000 index 0000000..05b00ca --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/contracts/solution_provider_repository.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b5/96/31/103e4c44f266428557b68eb23a16da874960c3173b95abfdc7726f8336 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/frame.py b/venv/lib/python3.8/site-packages/crashtest/frame.py new file mode 120000 index 0000000..48b1b5f --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/frame.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9e/be/4c/7e0970c2cea259a2e25f95c5a01213597a90bc90c489d90c329028bc96 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/frame_collection.py b/venv/lib/python3.8/site-packages/crashtest/frame_collection.py new file mode 120000 index 0000000..ada4555 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/frame_collection.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c7/d2/a0/db332734ba22326e2c5ec721933a739863fa7578e34dcb95dcef32878b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/inspector.py b/venv/lib/python3.8/site-packages/crashtest/inspector.py new file mode 120000 index 0000000..fe71ba8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/inspector.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/74/fa/2d/b55a6ad0a5f4c44ac1aa608fda4fe51fbc1a7cfe045dc9974b148317b2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/solution_providers/__init__.py b/venv/lib/python3.8/site-packages/crashtest/solution_providers/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/solution_providers/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/crashtest/solution_providers/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/crashtest/solution_providers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..ce67eae Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/solution_providers/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/solution_providers/__pycache__/solution_provider_repository.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/crashtest/solution_providers/__pycache__/solution_provider_repository.cpython-38.pyc new file mode 100644 index 0000000..8f7c35c Binary files /dev/null and b/venv/lib/python3.8/site-packages/crashtest/solution_providers/__pycache__/solution_provider_repository.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/crashtest/solution_providers/solution_provider_repository.py b/venv/lib/python3.8/site-packages/crashtest/solution_providers/solution_provider_repository.py new file mode 120000 index 0000000..618e1a1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/crashtest/solution_providers/solution_provider_repository.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bd/4a/27/2fca0aac2f3217d73f128af063cd6541040a12b4079302c3e945c3c277 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE new file mode 120000 index 0000000..bf4a04c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/43/da/d2/cc752ab721cd9a9f36ece70fb53ab7713551f2d3d8694d8e8c5a06d6e2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.APACHE b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.APACHE new file mode 120000 index 0000000..db18be2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.APACHE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/aa/c7/3b/3148f6d1d7111dbca32099f68d26c644c6813ae1e4f05f6579aa2663fe \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.BSD b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.BSD new file mode 120000 index 0000000..9f16920 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.BSD @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/60/2c/4c/7482de6479dd2e9793cda275e5e63d773dacd1eca689232ab7008fb4fb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.PSF b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.PSF new file mode 120000 index 0000000..7d2cfab --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/LICENSE.PSF @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/69/3e/c0/a662b39f995a4f252b03a6222945470c1b6f12ca02918e4efe0df64b9f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/METADATA b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/METADATA new file mode 100644 index 0000000..6cad914 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/METADATA @@ -0,0 +1,141 @@ +Metadata-Version: 2.1 +Name: cryptography +Version: 37.0.4 +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. 
+Home-page: https://github.com/pyca/cryptography +Author: The Python Cryptographic Authority and individual contributors +Author-email: cryptography-dev@python.org +License: BSD-3-Clause OR Apache-2.0 +Project-URL: Documentation, https://cryptography.io/ +Project-URL: Source, https://github.com/pyca/cryptography/ +Project-URL: Issues, https://github.com/pyca/cryptography/issues +Project-URL: Changelog, https://cryptography.io/en/latest/changelog/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +License-File: LICENSE.PSF +Requires-Dist: cffi (>=1.12) +Provides-Extra: docs +Requires-Dist: sphinx (!=1.8.0,!=3.1.0,!=3.1.1,>=1.6.5) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Provides-Extra: docstest +Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest' +Requires-Dist: twine (>=1.12.0) ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest' +Provides-Extra: pep8test +Requires-Dist: black ; extra == 'pep8test' +Requires-Dist: flake8 ; extra == 'pep8test' +Requires-Dist: flake8-import-order ; extra == 'pep8test' +Requires-Dist: pep8-naming ; extra == 'pep8test' +Provides-Extra: sdist +Requires-Dist: setuptools-rust (>=0.11.4) ; extra == 'sdist' +Provides-Extra: ssh +Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh' +Provides-Extra: test +Requires-Dist: pytest (>=6.2.0) ; extra == 'test' +Requires-Dist: pytest-benchmark ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: pytest-subtests ; extra == 'test' +Requires-Dist: pytest-xdist ; extra == 'test' +Requires-Dist: pretend ; extra == 'test' +Requires-Dist: iso8601 ; extra == 'test' +Requires-Dist: pytz ; extra == 'test' +Requires-Dist: hypothesis (!=3.79.2,>=1.11.4) ; extra == 'test' + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain + +.. 
image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=main + :target: https://codecov.io/github/pyca/cryptography?branch=main + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 3.6+ and PyPy3 7.2+. + +``cryptography`` includes both high level recipes and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests, and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + b'...' + >>> f.decrypt(token) + b'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +You can install ``cryptography`` with: + +.. code-block:: console + + $ pip install cryptography + +For full details see `the installation documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get +involved. + +Security +~~~~~~~~ + +Need to report a security issue? Please consult our `security reporting`_ +documentation. + + +.. _`documentation`: https://cryptography.io/ +.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev +..
_`security reporting`: https://cryptography.io/en/latest/security/ + + diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/RECORD b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/RECORD new file mode 100644 index 0000000..b9d46e4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/RECORD @@ -0,0 +1,182 @@ +cryptography-37.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography-37.0.4.dist-info/LICENSE,sha256=Q9rSzHUqtyHNmp827OcPtTq3cTVR8tPYaU2OjFoG1uI,323 +cryptography-37.0.4.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-37.0.4.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography-37.0.4.dist-info/LICENSE.PSF,sha256=aT7ApmKzn5laTyUrA6YiKUVHDBtvEsoCkY5O_g32S58,2415 +cryptography-37.0.4.dist-info/METADATA,sha256=Kb2UNR81590eIkVsCic1MfyU25jBTS2b4sTvaivzkT0,5454 +cryptography-37.0.4.dist-info/RECORD,, +cryptography-37.0.4.dist-info/WHEEL,sha256=f3qBETQEkAw2gp_UK66CyyDRJ0NORRrrePJfdEDGBj4,112 +cryptography-37.0.4.dist-info/top_level.txt,sha256=zYbdX67v4JFZPfsaNue7ZV4-mgoRqYCAhMsNgt22LqA,22 +cryptography/__about__.py,sha256=--Ir4xTpXvJIAKXDX9hap3KQf2E9--cHRoaKB-fb_cY,417 +cryptography/__init__.py,sha256=j08JCN_u_m8eL-zxbXRxgsriW6Oe29oSSo_e2hyyasg,748 +cryptography/__pycache__/__about__.cpython-38.pyc,, +cryptography/__pycache__/__init__.cpython-38.pyc,, +cryptography/__pycache__/exceptions.cpython-38.pyc,, +cryptography/__pycache__/fernet.cpython-38.pyc,, +cryptography/__pycache__/utils.cpython-38.pyc,, +cryptography/exceptions.py,sha256=sN_VVTF_LuKMM6R-lIASFFuzAmz1uZ2Qbcdko9WyS64,1471 +cryptography/fernet.py,sha256=W1NZKgmIxX-iMJr6GW-vm-j4BkYACKxHjrq4cbIZMaw,6604 +cryptography/hazmat/__init__.py,sha256=OYlvgprzULzZlsf3yYTsd6VUVyQmpsbHjgJdNnsyRwE,418 +cryptography/hazmat/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/__pycache__/_oid.cpython-38.pyc,, +cryptography/hazmat/_oid.py,sha256=CrOgnohV6ckWZU7ZFUYvgf_Gj8npC7LhJfzBB6wLHYc,15440 +cryptography/hazmat/backends/__init__.py,sha256=bgrjB1SX2vXX-rmfG7A4PqGkq-isqQVXGaZtjWHAgj0,324 +cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__init__.py,sha256=7rpz1Z3eV9vZy_d2iLrwC8Oz0vEruDFrjJlc6W2ZDXA,271 +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc,, 
+cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc,, +cryptography/hazmat/backends/openssl/aead.py,sha256=1GASyrJPO8a-mDPTT7VJZXPb_0zEdrkW-Wu_rxV-6RQ,8442 +cryptography/hazmat/backends/openssl/backend.py,sha256=XBpLgVxVYvXwmkdcCwd0bVQWK0EBTHTbCFDsLGsBbBg,96584 +cryptography/hazmat/backends/openssl/ciphers.py,sha256=n3rrPQZi1blJBKqIWeMG6-U6YTvEb8rXGQKn8i-kFog,10342 +cryptography/hazmat/backends/openssl/cmac.py,sha256=K5-S0H72KHZH-WPAcHL5jCtcNyoBZSMe7VmxGn8_VWA,3005 +cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=nSqtgO5MJVf_UUkvw9tez10zhGnsGHq24OP1X2GKOe4,1113 +cryptography/hazmat/backends/openssl/dh.py,sha256=9fwPordToELTkeJ-c7TuO9NiE1vfUBejk2QEUZbvo4s,12230 +cryptography/hazmat/backends/openssl/dsa.py,sha256=awfP80ykAfb4C_I-aOo-PnGU1DF6uf8bnEi-jld18ec,8888 +cryptography/hazmat/backends/openssl/ec.py,sha256=kgxwW508FTXDwGG-7pSywBLlICZKKfo4bcTHnNpsvJY,11103 +cryptography/hazmat/backends/openssl/ed25519.py,sha256=irHT-jSbpTNMMHqw5T885uzAi3Syf3kaaHuTnKgQPSg,5920 +cryptography/hazmat/backends/openssl/ed448.py,sha256=K8HDEiXl98QGJ-4llT4SVZf5-xe8aCuci00DkZf0lhw,5874 +cryptography/hazmat/backends/openssl/encode_asn1.py,sha256=4RUYVTpkYh6J1BnmYdr3G8xv4X1H-K2k2-fQoIDkpHI,570 +cryptography/hazmat/backends/openssl/hashes.py,sha256=3L5bkCOo2LbRSVNGLca_9rpCZ2zb8ISBrMLtts1BkEw,3241 +cryptography/hazmat/backends/openssl/hmac.py,sha256=9RX8bo9ywJievoodxjmqCXmD2iUWyH2jBmw78Hb-pOY,3095 +cryptography/hazmat/backends/openssl/poly1305.py,sha256=_qyGCXNaQVCFpa1qjb_9UtsI6lmnki_15Jbc5vihbeE,2514 +cryptography/hazmat/backends/openssl/rsa.py,sha256=KK97C_jBJEUGfKfJK7E8QZ-uZb6DTJjX6dLOuVbqTI8,20720 +cryptography/hazmat/backends/openssl/utils.py,sha256=7Ara81KkY0QCLPqW6kUG9dEsp52cZ3kOUJczwEpecJ0,1977 +cryptography/hazmat/backends/openssl/x25519.py,sha256=oA_ao4o27ki_OAx0UXNeI2ItZ84Xg_li7It1DxFlrZ0,4753 +cryptography/hazmat/backends/openssl/x448.py,sha256=a_zgqGUpGFvyKEoKRR1vgNdD_gk1gxGYpBp1a6x9HuE,4338 +cryptography/hazmat/backends/openssl/x509.py,sha256=WUoRC6UDM9FkOdn6xR5Mk-v_WCq7eJryenGN9ni8L-A,1452 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/bindings/_openssl.abi3.so,sha256=ve_aeP0LRBDKeLMIFZHefDaeyl_qNpFckeq3pFXpVIY,8032104 +cryptography/hazmat/bindings/_rust.abi3.so,sha256=bZ1mlz1l1tNP4CVdX5h9YFCA84pR7RmMyjRvgEIi8C4,3624096 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=ga5QLYp8MmumQB-Rp4TGHq_NAqONcXTrLm2712TZ9Ms,103 +cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=Hovrt8dXZ9p8BKHaroPYmunu1VtDuirslJnE4jTG28s,411 +cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=jATWMh1yz5JpnnT7A10_sbY-ja5zARnOpZaToLqm43Y,768 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=YdnUA9uu60WhQP5QihTgo2pBrT49wxYtayCZ4trgZAg,1497 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc,, +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=K0JIsYkDBifV-x5WDeq9M1Hofr6HW667rDFlhDEiIMQ,10078 
+cryptography/hazmat/bindings/openssl/binding.py,sha256=hWWp-N_JMUuYaNuRHgpadEEdVJsL-wAhdQpTj-Pi0Vc,8044 +cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/_serialization.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc,, +cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc,, +cryptography/hazmat/primitives/_asymmetric.py,sha256=nVJwmxkakirAXfFp410pC4kY_CinzN5FSJwhEn2IE34,485 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=zd7N8rBYWaf8tPM7GDtZ9vUgarK_P0_PUNCFi3A0u0c,1016 +cryptography/hazmat/primitives/_serialization.py,sha256=OC_uXC5cNHucoOkHuTsZbfcQ9bvZs1cq7b18TcJu4Es,1341 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc,, +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=pjjgKFcn2bCAaL_5zr0ygwXM8pHJFyAO6koJFkzhQb8,6604 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=dIo6lYiHWRWUCxwejAi01w1-3jjmzEuJovqaVqDO3_g,7870 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=wX8wH9bD7g7-YxmINam_s9cPc9RUPrui2QdIKg6Q3Nc,14468 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=1qOl1UWV_-cXKHhwlFSyPBdhpx2HMDRukkI6eI5i8vM,2728 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=oR-j4jGcWUnGxWi1GygHxVZbgkSOKHsR6y1E3Lf6wYM,2647 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=EkKuY9e6UFqSuQ0LvyKYKl_L19tOfNCTlHWEiKgHeUc,2690 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=GiCIuBuJdpeew-yJ7mnTF4KFH_FUJaut1r-d6TRs31s,11322 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=VidxjUWPOMB8q8vGiaXMhY715Zw8U9Vd4_rkqjOagRE,1814 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=5hD4KjfMbmozeFq08PLVunHr4FgeVzV1NkKalECM26s,756 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=-nbaGlgT1sufO9Ic-urwKDql8Da0U3GL6hZJIMqHgVc,2588 +cryptography/hazmat/primitives/asymmetric/x448.py,sha256=V3lxb1VOiRTa3bzVUC3uZat2ogfExUOdktCIGUUMZ2Y,2556 
+cryptography/hazmat/primitives/ciphers/__init__.py,sha256=Qp78Y3PDSRfwp7DDa3pezlLrED_QFhic_LvDw4LM9ZQ,646 +cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc,, +cryptography/hazmat/primitives/ciphers/aead.py,sha256=QnJD2doZ8XdpCIrDwqJBNgaw2eG9Tx4FWirIP159MAg,11488 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=GoM_c2LNonci43B06g0e9zXW6PfBh_uAFiZhIxrQ_x4,4677 +cryptography/hazmat/primitives/ciphers/base.py,sha256=AiCYCzXbSZ9wQbXWMYc60IKmzqz5619YdaaF0zVr4rY,8251 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=Pb2h2X0HUVfM6xKdSZjCgCsUcCQ32mDTSTRQQl4-1Gs,7988 +cryptography/hazmat/primitives/cmac.py,sha256=ODkc7EonY1cRxyJ0SYOuwtiYQv6B0ZPxJQm3rXxfXd4,2037 +cryptography/hazmat/primitives/constant_time.py,sha256=6bkW00QjhKusdgsQbexXhMlGX0XRN59XNmxWS2W38NA,387 +cryptography/hazmat/primitives/hashes.py,sha256=cpaYjgkazlq7Xw0MVoR3cp17mD0TgyEvhZQbyoAWHzU,5996 +cryptography/hazmat/primitives/hmac.py,sha256=M_sa4smPIkO8ra17Xl_cM0daRhGCozUu_8gnHePEIb0,2131 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=DcZhzfLG8d8IYBH771lGTVU5S87OQDpu3nrfOwZnsmA,715 +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc,, +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=5YXw8cLZCBYT6rVDGS5URQEeFiPW-ZRBRcPdZQIxTMA,3772 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=LlDQbCvlNzuLa_UJXrkG5fXGjAjor5Wunv2378TBmms,3031 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=QmgJw2_l0D21DEMMfuNQ6e1IaLV3bjwOzMJEAXhpOVs,7699 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=wEMH4CJfPccCg9apQLXyWUWBrZLTpYLLnoZEnzvaHQo,2032 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=JvX_cD0o0Op5EcFNeZhr-vI5sYv_LdnJ6kNEbW3u5ow,2228 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=JsdrJhw2IJVYkl8JIWUN66h7DrKZM2RoQ_tw_iKAvdI,2018 +cryptography/hazmat/primitives/keywrap.py,sha256=TWqyG9K7k-Ymq4kcIw7u3NIKUPVDtv6bimwxIJYTe20,5643 +cryptography/hazmat/primitives/padding.py,sha256=xruasOE5Cd8KEQ-yp9W6v9WKPvKH-GudHCPKQ7A8HfI,6207 +cryptography/hazmat/primitives/poly1305.py,sha256=QvxPMrqjgKJt0mOZSeZKk4NcxsNCd2kgfI-X1CmyUW4,1837 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=RALEthF7wRjlMyTvSq09XmKQey74tsSdDCCsDaD6yQU,1129 +cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc,, +cryptography/hazmat/primitives/serialization/base.py,sha256=yw8_yzIvruT6fmS-KrTmIXbAF00rItH48WXTPOSLdJ4,1761 
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=tHQlCKOY0EfOBBiqR_et4TgcDY_OAtRENC69arjvyLU,6481 +cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=LnISP-1SEDXCpsoEbR0EfuIlWm8eJAgWupt0gvHyyIU,5870 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=Bp4M8yFrLnw9Oj3jCbttewovymCX_OGzBg5GBw4RPpA,23923 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=ZHo4zwWidFP2RWFl8luiNuYkVMZPghzx54izPNSCtD4,222 +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc,, +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=v4wkTbdc1E53POx6pdNnEUBvANbmt4f6scQSsTgABeU,2989 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=bIIxOI-LcLGNahB5kN7A_TwEyYMTsLjHd8eJc4b2cLg,1449 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=v3zj8FS2bHYAmH__CECwfa0flmFCQnGrxZBRQcDys1A,5627 +cryptography/x509/__init__.py,sha256=yC0TbuvPmWL1U4rEY-0m46SayuxCfPVNFWjJJdi5lY0,7654 +cryptography/x509/__pycache__/__init__.cpython-38.pyc,, +cryptography/x509/__pycache__/base.cpython-38.pyc,, +cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc,, +cryptography/x509/__pycache__/extensions.cpython-38.pyc,, +cryptography/x509/__pycache__/general_name.cpython-38.pyc,, +cryptography/x509/__pycache__/name.cpython-38.pyc,, +cryptography/x509/__pycache__/ocsp.cpython-38.pyc,, +cryptography/x509/__pycache__/oid.cpython-38.pyc,, +cryptography/x509/base.py,sha256=FNBVG8ACCBZLF5TXlq1vjEPWtlR5YZ1Bs25AIxEHI34,33747 +cryptography/x509/certificate_transparency.py,sha256=Elm_-GGA6k9zrcm5KYVY5uTirDsvGc_BUuTLR7Hu-K4,1119 +cryptography/x509/extensions.py,sha256=LFy1jgd_0Z_P-lE2-07nZGJmsFh_1cOOTAl9ejyXFHQ,64999 +cryptography/x509/general_name.py,sha256=S_kJd4ZsNGrMfi2osfFJEWqPxy3oPCAWpLb91yhxzPs,7896 +cryptography/x509/name.py,sha256=EOtO9CscxrfsxhO6GTEfVhiZo3_EE7qGIFfv1eeI4-U,14200 +cryptography/x509/ocsp.py,sha256=OQKsqW_Y4mWY53UT_JG79RJR19xt53Q-iQSSw4m0kZM,16691 +cryptography/x509/oid.py,sha256=CLIlQwzE3PQXMvkKep4JbzVUaRDl_stwcX_U6-s2cNw,794 diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/WHEEL b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/WHEEL new file mode 120000 index 0000000..596dcbc --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7f/7a/81/113404900c36829fd42bae82cb20d127434e451aeb78f25f7440c6063e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/top_level.txt new file mode 120000 index 0000000..c068d3e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography-37.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cd/86/dd/5faeefe091593dfb1a36e7bb655e3e9a0a11a9808084cb0d82ddb62ea0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/__about__.py b/venv/lib/python3.8/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..399beb6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/__about__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+
+
+__all__ = [
+    "__version__",
+    "__author__",
+    "__copyright__",
+]
+
+__version__ = "37.0.4"
+
+__author__ = "The Python Cryptographic Authority and individual contributors"
+__copyright__ = "Copyright 2013-2022 {}".format(__author__)
diff --git a/venv/lib/python3.8/site-packages/cryptography/__init__.py b/venv/lib/python3.8/site-packages/cryptography/__init__.py
new file mode 120000
index 0000000..7f821c9
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/cryptography/__init__.py
@@ -0,0 +1 @@
+/home/runner/.cache/pip/pool/8f/4f/09/08dfeefe6f1e2fecf16d747182cae25ba39edbda124a8fdeda1cb26ac8
\ No newline at end of file
diff --git a/venv/lib/python3.8/site-packages/cryptography/__pycache__/__about__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/__pycache__/__about__.cpython-38.pyc
new file mode 100644
index 0000000..db64bd2
Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/__pycache__/__about__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/cryptography/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..69ad648
Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/cryptography/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/__pycache__/exceptions.cpython-38.pyc
new file mode 100644
index 0000000..1ee516b
Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/__pycache__/exceptions.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/cryptography/__pycache__/fernet.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/__pycache__/fernet.cpython-38.pyc
new file mode 100644
index 0000000..d87e7b1
Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/__pycache__/fernet.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/cryptography/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..a2f4be6
Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/__pycache__/utils.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/cryptography/exceptions.py b/venv/lib/python3.8/site-packages/cryptography/exceptions.py
new file mode 120000
index 0000000..5dfd0c1
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/cryptography/exceptions.py
@@ -0,0 +1 @@
+/home/runner/.cache/pip/pool/b0/df/d5/55317f2ee28c33a47e948012145bb3026cf5b99d906dc764a3d5b24bae
\ No newline at end of file
diff --git a/venv/lib/python3.8/site-packages/cryptography/fernet.py b/venv/lib/python3.8/site-packages/cryptography/fernet.py
new file mode 120000
index 0000000..468e202
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/cryptography/fernet.py
@@ -0,0 +1 @@
+/home/runner/.cache/pip/pool/5b/53/59/2a0988c57fa2309afa196faf9be8f806460008ac478ebab871b21931ac
\ No newline at end of file
diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/__init__.py
new file mode 120000
index 0000000..bd59ecb
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/__init__.py
@@ -0,0 +1 @@
+/home/runner/.cache/pip/pool/39/89/6f/829af350bcd996c7f7c984ec77a554572426a6c6c78e025d367b324701
\ No newline at end of file
diff --git
a/venv/lib/python3.8/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..934d432 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-38.pyc new file mode 100644 index 0000000..26e026e Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/_oid.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/_oid.py new file mode 120000 index 0000000..fa02bb7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/_oid.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0a/b3/a0/9e8855e9c916654ed915462f81ffc68fc9e90bb2e125fcc107ac0b1d87 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/__init__.py new file mode 120000 index 0000000..8934a3d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6e/0a/e3/075497daf5d7fab99f1bb0383ea1a4abe8aca9055719a66d8d61c0823d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a05763a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 120000 index 0000000..85b3440 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ee/ba/73/d59dde57dbd9cbf77688baf00bc3b3d2f12bb8316b8c995ce96d990d70 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..7f7b281 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc new file mode 100644 index 0000000..184b2bb Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc new file mode 100644 index 0000000..5cb4d18 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc new file mode 100644 index 0000000..648ee91 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc new file mode 100644 index 0000000..15330ce Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc new file mode 100644 index 0000000..098ab40 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc new file mode 100644 index 0000000..ca23a26 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc new file mode 100644 index 0000000..2b320b4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc new file mode 100644 index 0000000..aa6b31d Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc new file mode 100644 index 0000000..b489847 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc new file mode 100644 index 0000000..2bdacd4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc new file mode 100644 index 0000000..df494e0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc new file mode 100644 index 0000000..f29974b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc new file mode 100644 index 0000000..316af7f Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc new file mode 100644 index 0000000..2a4fc46 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc new file mode 100644 index 0000000..cb3391f Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..02f7b31 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc new file mode 100644 index 0000000..b35033f Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc new file mode 100644 index 0000000..f5f66a8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc new file mode 100644 index 0000000..afd6c1d Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/aead.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/aead.py new file mode 120000 index 0000000..f5ca18d --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/aead.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d4/60/12/cab24f3bc6be9833d34fb5496573dbff4cc476b916f96bbfaf157ee914 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/backend.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 120000 index 0000000..4fa0b87 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5c/1a/4b/815c5562f5f09a475c0b07746d54162b41014c74db0850ec2c6b016c18 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ciphers.py new file mode 120000 index 0000000..d153f82 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ciphers.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9f/7a/eb/3d0662d5b94904aa8859e306ebe53a613bc46fcad71902a7f22fa41688 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/cmac.py new file mode 120000 index 0000000..6181f08 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/cmac.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2b/9f/92/d07ef6287647f963c07072f98c2b5c372a0165231eed59b11a7f3f5560 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py new file mode 120000 index 0000000..d925227 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9d/2a/ad/80ee4c2557ff51492fc3db5ecf5d338469ec187ab6e0e3f55f618a39ee \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/dh.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/dh.py new file mode 120000 index 0000000..f4d3629 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/dh.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f5/fc/0f/a2b753a042d391e27e73b4ee3bd362135bdf5017a39364045196efa38b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/dsa.py new file mode 120000 index 0000000..55369bb --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/dsa.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6b/07/cf/f34ca401f6f80bf23e68ea3e3e7194d4317ab9ff1b9c48be8e5775f1e7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ec.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ec.py new file mode 120000 index 0000000..2f9a0ea --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/92/0c/70/5b9d3c1535c3c061beee94b2c012e520264a29fa386dc4c79cda6cbc96 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ed25519.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ed25519.py new file mode 120000 index 0000000..89110d4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ed25519.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8a/b1/d3/fa349ba5334c307ab0e53f3ce6ecc08b74b27f791a687b939ca8103d28 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ed448.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ed448.py new file mode 120000 index 0000000..71de573 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/ed448.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2b/c1/c3/1225e5f7c40627ee25953e125597f9fb17bc682b9c8b4d039197f4961c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py new file mode 120000 index 0000000..82dafba --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e1/15/18/553a64621e89d419e661daf71bcc6fe17d47f8ada4dbe7d0a080e4a472 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hashes.py new file mode 120000 index 0000000..cd0de9b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hashes.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/dc/be/5b/9023a8d8b6d14953462dc6bff6ba42676cdbf08481acc2edb6cd41904c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hmac.py new file mode 120000 index 0000000..5b9658e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hmac.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f5/15/fc/6e8f72c0989ebe8a1dc639aa097983da2516c87da3066c3bf076fea4e6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/poly1305.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/poly1305.py new file mode 120000 index 0000000..4a9ea14 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/poly1305.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fe/ac/86/09735a415085a5ad6a8dbffd52db08ea59a7922ff5e496dce6f8a16de1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/rsa.py new file mode 120000 index 0000000..3bb370a --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/rsa.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/28/af/7b/0bf8c12445067ca7c92bb13c419fae65be834c98d7e9d2ceb956ea4c8f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/utils.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/utils.py new file mode 120000 index 
0000000..7e4922e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ec/0a/da/f352a46344022cfa96ea4506f5d12ca79d9c67790e509733c04a5e709d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x25519.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x25519.py new file mode 120000 index 0000000..d0698ab --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x25519.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a0/0f/da/a38a36ee48bf380c7451735e23622d67ce1783f962ec8b750f1165ad9d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x448.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x448.py new file mode 120000 index 0000000..28b7158 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x448.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6b/fc/e0/a86529185bf2284a0a451d6f80d743fe0935831198a41a756bac7d1ee1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x509.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x509.py new file mode 120000 index 0000000..eddaef2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/x509.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/59/4a/11/0ba50333d16439d9fac51e4c93ebff582abb789af27a718df678bc2fe0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 120000 index 0000000..a9e9e5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b3/da/0a/090db2705757a0445d4b58a669fb9e4a406c2fd92f6f27e085a6ae67d6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..fbb4687 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_openssl.abi3.so b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_openssl.abi3.so new file mode 100755 index 0000000..a658b50 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_openssl.abi3.so differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust.abi3.so new file mode 100755 index 0000000..6c23c80 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi new file mode 120000 index 0000000..7e558d3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/81/ae/50/2d8a7c326ba6401f91a784c61eafcd02a38d7174eb2e6dbbd764d9f4cb \ No newline 
at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi new file mode 120000 index 0000000..87bb774 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1e/8b/eb/b7c75767da7c04a1daae83d89ae9eed55b43ba2aec9499c4e234c6dbcb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi new file mode 120000 index 0000000..7e5f745 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8c/04/d6/321d72cf92699e74fb035d3fb1b63e8dae730119cea59693a0baa6e376 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi new file mode 120000 index 0000000..5fbcec8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/61/d9/d4/03dbaeeb45a140fe508a14e0a36a41ad3e3dc3162d6b2099e2dae06408 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 120000 index 0000000..a9e9e5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b3/da/0a/090db2705757a0445d4b58a669fb9e4a406c2fd92f6f27e085a6ae67d6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3c4c284 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc new file mode 100644 index 0000000..46fedf5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc new file mode 100644 index 0000000..c195513 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 120000 index 0000000..4e43165 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2b/42/48/b189030627d5fb1e560deabd3351e87ebe875baebbac316584312220c4 \ No newline at end of file 
diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 120000 index 0000000..00d91cb --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/85/65/a9/f8dfc9314b9868db911e0a5a74411d549b0bfb0021750a538fe3e2d157 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 120000 index 0000000..a9e9e5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b3/da/0a/090db2705757a0445d4b58a669fb9e4a406c2fd92f6f27e085a6ae67d6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4949f13 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-38.pyc new file mode 100644 index 0000000..a3e351a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-38.pyc new file mode 100644 index 0000000..06af976 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-38.pyc new file mode 100644 index 0000000..948a983 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc new file mode 100644 index 0000000..c88c542 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc new file mode 100644 index 0000000..97006cd Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc new file 
mode 100644 index 0000000..baef90b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc new file mode 100644 index 0000000..01b24ce Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc new file mode 100644 index 0000000..5098a71 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc new file mode 100644 index 0000000..c33032f Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc new file mode 100644 index 0000000..2d8e5ee Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_asymmetric.py new file mode 120000 index 0000000..c363122 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_asymmetric.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9d/52/70/9b191a922ac05df169e35d290b8918fc28a7ccde45489c21127d88137e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py new file mode 120000 index 0000000..2961a6b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cd/de/cd/f2b05859a7fcb4f33b183b59f6f5206ab2bf3f4fcf50d0858b7034bb47 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_serialization.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_serialization.py new file mode 120000 index 0000000..495439c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/_serialization.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/38/2f/ee/5c2e5c347b9ca0e907b93b196df710f5bbd9b3572aedbd7c4dc26ee04b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 120000 index 0000000..a9e9e5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/b3/da/0a/090db2705757a0445d4b58a669fb9e4a406c2fd92f6f27e085a6ae67d6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..aa7dfbb Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc new file mode 100644 index 0000000..16e5840 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc new file mode 100644 index 0000000..7d3fb3a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc new file mode 100644 index 0000000..f8ee106 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc new file mode 100644 index 0000000..29a779a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc new file mode 100644 index 0000000..574cd6d Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc new file mode 100644 index 0000000..3bba3c7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc new file mode 100644 index 0000000..fda4a5b Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-38.pyc new file mode 100644 index 0000000..8abb8f9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..e1b7010 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc new file mode 100644 index 0000000..3d1126d Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc new file mode 100644 index 0000000..c78806e Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 120000 index 0000000..8e19895 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a6/38/e0/285727d9b08068bff9cebd328305ccf291c917200eea4a09164ce141bf \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 120000 index 0000000..b20a79b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/74/8a/3a/9588875915940b1c1e8c08b4d70d7ede38e6cc4b89a2fa9a56a0cedff8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 120000 index 0000000..5214931 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c1/7f/30/1fd6c3ee0efe63198835a9bfb3d70f73d4543ebba2d907482a0e90dcd7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 120000 index 0000000..fee8d3f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d6/a3/a5/d54595ffe7172878709454b23c1761a71d8730346e92423a788e62f2f3 \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 120000 index 0000000..6dec948 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a1/1f/a3/e2319c5949c6c568b51b2807c5565b82448e287b11eb2d44dcb7fac183 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 120000 index 0000000..efd3f39 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/12/42/ae/63d7ba505a92b90d0bbf22982a5fcbd7db4e7cd09394758488a8077947 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 120000 index 0000000..ff6968f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/20/88/b81b8976979ec3ec89ee69d31782851ff15425abadd6bf9de9346cdf5b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/types.py new file mode 120000 index 0000000..8f00ebd --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/types.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/56/27/71/8d458f38c07cabcbc689a5cc858ef5e59c3c53d55de3fae4aa339a8111 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 120000 index 0000000..5ab6684 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/10/f8/2a37cc6e6a33785ab4f0f2d5ba71ebe0581e57357536429a94408cdbab \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 120000 index 0000000..5d728a8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fa/76/da/1a5813d6cb9f3bd21cfaeaf0283aa5f036b453718bea164920ca878157 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 120000 index 0000000..336c844 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/57/79/71/6f554e8914daddbcd5502dee65ab76a207c4c5439d92d08819450c6766 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 120000 index 0000000..c868938 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/42/9e/fc/6373c34917f0a7b0c36b7a5ece52eb103fd016189cfcbbc3c382ccf594 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..432c3c0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc new file mode 100644 index 0000000..c618078 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc new file mode 100644 index 0000000..a893004 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000..43e2351 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc new file mode 100644 index 0000000..d240934 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/aead.py new file mode 120000 index 0000000..8edd082 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/42/72/43/d9da19f17769088ac3c2a2413606b0d9e1bd4f1e055a2ac83f5e7d3008 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 120000 index 0000000..a0467f7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/83/3f/7362cda27722e37074ea0d1ef735d6e8f7c187fb80162661231ad0ff1e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 120000 index 0000000..1c40b64 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ 
-0,0 +1 @@ +/home/runner/.cache/pip/pool/02/20/98/0b35db499f7041b5d631873ad082a6ceacf9eb5f5875a685d3356be2b6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 120000 index 0000000..a5588b2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3d/bd/a1/d97d075157cceb129d4998c2802b14702437da60d3493450425e3ed46b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/cmac.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 120000 index 0000000..bb9764b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/38/39/1c/ec4a27635711c722744983aec2d89842fe81d193f12509b7ad7c5f5dde \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/constant_time.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 120000 index 0000000..d448583 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e9/b9/16/d3442384abac760b106dec5784c9465f45d1379f57366c564b65b7f0d0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/hashes.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 120000 index 0000000..ae4121b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/72/96/98/8e091ace5abb5f0d0c568477729d7b983d1383212f85941bca80161f35 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/hmac.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 120000 index 0000000..8b21084 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/33/fb/1a/e2c98f2243bcadad7b5e5fdc33475a461182a3352effc8271de3c421bd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 120000 index 0000000..29c8f71 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0d/c6/61/cdf2c6f1df086011fbef59464d55394bcece403a6ede7adf3b0667b260 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..176ada2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc new file mode 100644 index 0000000..05c0e2e Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc new file mode 100644 index 0000000..3588f40 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc new file mode 100644 index 0000000..d9303f7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc new file mode 100644 index 0000000..68768d2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc new file mode 100644 index 0000000..dfd50b4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc new file mode 100644 index 0000000..7561f4a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 120000 index 0000000..283545f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e5/85/f0/f1c2d9081613eab543192e5445011e1623d6f9944145c3dd6502314cc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 120000 index 0000000..2f49e5a --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2e/50/d0/6c2be5373b8b6bf5095eb906e5f5c68c08e8af95ae9efdb7efc4c19a6b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 120000 index 0000000..2282470 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/42/68/09/c36fe5d03db50c430c7ee350e9ed4868b5776e3c0eccc244017869395b \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 120000 index 0000000..cceeeed --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c0/43/07/e0225f3dc70283d6a940b5f2594581ad92d3a582cb9e86449f3bda1d0a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 120000 index 0000000..4f8e0d8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/26/f5/ff/703d28d0ea7911c14d79986bfaf239b18bff2dd9c9ea43446d6deee68c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 120000 index 0000000..25fc348 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/26/c7/6b/261c36209558925f0921650deba87b0eb29933646843fb70fe2280bdd2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/keywrap.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 120000 index 0000000..48357f8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4d/6a/b2/1bd2bb93e626ab891c230eeedcd20a50f543b6fe9b8a6c312096137b6d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/padding.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/padding.py new file mode 120000 index 0000000..83e374a --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c6/bb/9a/b0e13909df0a110fb2a7d5babfd58a3ef287f86b9d1c23ca43b03c1df2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/poly1305.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/poly1305.py new file mode 120000 index 0000000..2807cb9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/42/fc/4f/32baa380a26dd2639949e64a93835cc6c3427769207c8f97d429b2516e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__init__.py new file mode 120000 index 0000000..7a517ca --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/44/02/c4/b6117bc118e53324ef4aad3d5e62907b2ef8b6c49d0c20ac0da0fac905 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a844425 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000..004cb60 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc new file mode 100644 index 0000000..9fbc59a Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc new file mode 100644 index 0000000..1492916 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc new file mode 100644 index 0000000..f48a97e Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/base.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/base.py new file mode 120000 index 0000000..cc32d64 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cb/0f/3f/cb322faee4fa7e64be2ab4e62176c0174d2b22d1f8f165d33ce48b749e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 120000 index 0000000..536c18e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b4/74/25/08a398d047ce0418aa47f7ade1381c0d8fce02d444342ebd6ab8efc8b5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py new file mode 120000 index 0000000..30635c3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2e/72/12/3fed521035c2a6ca046d1d047ee2255a6f1e240816ba9b7482f1f2c885 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/ssh.py new file mode 120000 index 0000000..9f434a9 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/06/9e/0c/f3216b2e7c3d3a3de309bb6d7b0a2fca6097fce1b3060e46070e113e90 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 120000 index 0000000..6e6b4a6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/64/7a/38/cf05a27453f6456165f25ba236e62454c64f821cf1e788b33cd482b43e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..7183185 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc new file mode 100644 index 0000000..90bcdcc Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc new file mode 100644 index 0000000..ad992dc Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 120000 index 0000000..c5b1447 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bf/8c/24/4db75cd44e773cec7aa5d36711406f00d6e6b787fab1c412b1380005e5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 120000 index 0000000..020b0dc --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6c/82/31/388f8b70b18d6a107990dec0fd3c04c98313b0b8c777c7897386f670b8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/py.typed b/venv/lib/python3.8/site-packages/cryptography/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/utils.py b/venv/lib/python3.8/site-packages/cryptography/utils.py new file mode 120000 index 0000000..58d710c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/utils.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/bf/7c/e3/f054b66c7600987fff0840b07dad1f9661424271abc5905141c0f2b350 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__init__.py b/venv/lib/python3.8/site-packages/cryptography/x509/__init__.py new file mode 120000 index 0000000..4a7cd54 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c8/2d/13/6eebcf9962f5538ac463ed26e3a49acaec427cf54d1568c925d8b9958d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4604419 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/base.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000..3b2387e Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/base.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc new file mode 100644 index 0000000..013d298 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/extensions.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/extensions.cpython-38.pyc new file mode 100644 index 0000000..b44d2c0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/extensions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/general_name.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/general_name.cpython-38.pyc new file mode 100644 index 0000000..b5bacb5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/general_name.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/name.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/name.cpython-38.pyc new file mode 100644 index 0000000..428ad36 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/name.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/ocsp.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/ocsp.cpython-38.pyc new file mode 100644 index 0000000..d669174 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/ocsp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/oid.cpython-38.pyc b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/oid.cpython-38.pyc new file mode 100644 index 0000000..12b61a0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/cryptography/x509/__pycache__/oid.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/base.py b/venv/lib/python3.8/site-packages/cryptography/x509/base.py new file mode 120000 
index 0000000..4b249a1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/base.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/14/d0/55/1bc00208164b1794d796ad6f8c43d6b65479619d41b36e40231107237e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/certificate_transparency.py b/venv/lib/python3.8/site-packages/cryptography/x509/certificate_transparency.py new file mode 120000 index 0000000..7c80c30 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/certificate_transparency.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/12/59/bf/f86180ea4f73adc9b9298558e6e4e2ac3b2f19cfc152e4cb47b1eef8ae \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/extensions.py b/venv/lib/python3.8/site-packages/cryptography/x509/extensions.py new file mode 120000 index 0000000..e0ff30f --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/extensions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2c/5c/b5/8e077fd19fcffa5136fb4ee7646266b0587fd5c38e4c097d7a3c971474 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/general_name.py b/venv/lib/python3.8/site-packages/cryptography/x509/general_name.py new file mode 120000 index 0000000..9527d7e --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/general_name.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4b/f9/09/77866c346acc7e2da8b1f149116a8fc72de83c2016a4b6fdd72871ccfb \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/name.py b/venv/lib/python3.8/site-packages/cryptography/x509/name.py new file mode 120000 index 0000000..2a76fb7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/name.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/10/eb/4e/f42b1cc6b7ecc613ba19311f561899a37fc413ba862057efd5e788e3e5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/ocsp.py b/venv/lib/python3.8/site-packages/cryptography/x509/ocsp.py new file mode 120000 index 0000000..558959b --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/ocsp.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/39/02/ac/a96fd8e26598e77513fc91bbf51251d7dc6de7743e890492c389b49193 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/cryptography/x509/oid.py b/venv/lib/python3.8/site-packages/cryptography/x509/oid.py new file mode 120000 index 0000000..461a71c --- /dev/null +++ b/venv/lib/python3.8/site-packages/cryptography/x509/oid.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/08/b2/25/430cc4dcf41732f90a7a9e096f35546910e5fecb70717fd4ebeb3670dc \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/LICENSE b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/LICENSE new file mode 120000 index 0000000..2099120 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/83/c3/ed/394e600067c17b7d1c09bd3ea20f0703b8c0716e6a9f414b99fcbdf810 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/METADATA 
b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/METADATA new file mode 100644 index 0000000..af8a42e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/METADATA @@ -0,0 +1,27 @@ +Metadata-Version: 2.1 +Name: debugpy +Version: 1.6.3 +Summary: An implementation of the Debug Adapter Protocol for Python +Home-page: https://aka.ms/debugpy +Author: Microsoft Corporation +Author-email: ptvshelp@microsoft.com +License: MIT +Project-URL: Source, https://github.com/microsoft/debugpy +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Software Development :: Debuggers +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS +Classifier: Operating System :: POSIX +Classifier: License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0) +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE + +debugpy is an implementation of the Debug Adapter Protocol for Python. + +The source code and the issue tracker is [hosted on GitHub](https://github.com/microsoft/debugpy/). diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/RECORD new file mode 100644 index 0000000..c105274 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/RECORD @@ -0,0 +1,528 @@ +debugpy-1.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +debugpy-1.6.3.dist-info/LICENSE,sha256=g8PtOU5gAGfBe30cCb0-og8HA7jAcW5qn0FLmfy9-BA,1176 +debugpy-1.6.3.dist-info/METADATA,sha256=8mzOzvqjSM1ibrwJ-9luaffAX4aCBYkN7ox0qmdn3fk,1117 +debugpy-1.6.3.dist-info/RECORD,, +debugpy-1.6.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy-1.6.3.dist-info/WHEEL,sha256=6jd4iL1PdoODmcsbX84z5njbeYqvNy0oPQMQT3l84Qo,217 +debugpy-1.6.3.dist-info/direct_url.json,sha256=3AAp5hgBZxXC5Y_UhUrVWqOyyZhG6RDdx2E3CnMJ6Q4,328 +debugpy-1.6.3.dist-info/top_level.txt,sha256=6Om6JTEaqkWnj-9-7kJOJr988sTO6iSuiK4N9X6RLpg,8 +debugpy/ThirdPartyNotices.txt,sha256=WzmT853BlHOtkEiV_xtk96iazYeVDKWB2tqKMtcm_jo,34345 +debugpy/__init__.py,sha256=B621TRbcw1Pr4LrqSZB8Qr0CW9QfpbpcEz2eaN16jns,1018 +debugpy/__main__.py,sha256=feuxCZgFCWXu9rVRgySgWvbrS_HECrm_BvaSG8VPdDc,1829 +debugpy/__pycache__/__init__.cpython-38.pyc,, +debugpy/__pycache__/__main__.cpython-38.pyc,, +debugpy/__pycache__/_version.cpython-38.pyc,, +debugpy/__pycache__/public_api.cpython-38.pyc,, +debugpy/_vendored/__init__.py,sha256=cQGcZObOjPcKFDQk06oWNgqBHh2rXDCv0JTNISv3_rg,3878 +debugpy/_vendored/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/__pycache__/_pydevd_packaging.cpython-38.pyc,, +debugpy/_vendored/__pycache__/_util.cpython-38.pyc,, +debugpy/_vendored/__pycache__/force_pydevd.cpython-38.pyc,, +debugpy/_vendored/_pydevd_packaging.py,sha256=cYo9maxM8jNPj-vdvCtweaYAPX210Pg8-NHS8ZU02Mg,1245 +debugpy/_vendored/_util.py,sha256=E5k-21l2RXQHxl0C5YF5ZmZr-Fzgd9eNl1c6UYYzjfg,1840 +debugpy/_vendored/force_pydevd.py,sha256=rnbdg90Pij22LXkG59FF9FCFcMYdRo4E7qyT5pzYkE0,2216 +debugpy/_vendored/pydevd/__pycache__/pydev_app_engine_debug_startup.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/pydev_coverage.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/__pycache__/pydev_pysrc.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/pydev_run_in_console.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/pydevconsole.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/pydevd.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/pydevd_file_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/pydevd_tracing.cpython-38.pyc,, +debugpy/_vendored/pydevd/__pycache__/setup_pydevd_cython.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_calltip_util.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_completer.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_execfile.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_filesystem_encoding.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_getopt.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_imports_tipper.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_jy_imports_tipper.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_log.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_saved_modules.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_sys_patch.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_tipper_common.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_console_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_import_hook.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_imports.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console_011.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_is_thread_alive.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_localhost.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_log.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey_qt.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_override.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_umd.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_versioncheck.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_calltip_util.py,sha256=shNNj8qVCRge6yKmRDbCthzsG71MYtjc4KsaIr1glK0,4687 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_completer.py,sha256=Vk1jAIrXKT5-i49QqC0UL6wvCCSag8J-lu_NoIV-hAI,8544 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_execfile.py,sha256=Y1tsEGh18bIyF4gfIM84RDk8LUOpC4uYcB0uekv74O8,483 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py,sha256=VyYZXhesz-t_frcGmzm6b5K2ccv7yYRTXWcdbqxwpzM,1095 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_getopt.py,sha256=YYphxNwTDyaD73sWznWqc41Clw5vHF85-rIALwHqXB0,4458 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_imports_tipper.py,sha256=cuQ0YXVeBryJBAf3dU4CDcXVyIruYSAX0-vdMgneUpE,12350 
+debugpy/_vendored/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py,sha256=2FIRkXu71bTvaAHQDTRsVVONFuyd5otTX1kM_RC7XJY,17063 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_log.py,sha256=RrRri4IPsTWYPNA-EvTWOSXKoflIFAHxcERQppngJVM,555 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_saved_modules.py,sha256=D93V9B1C3qA9PoL0dV5bosenu5ncVQR2b-RAoxpvWTc,4573 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_sys_patch.py,sha256=7w1l1AAwvglYAAqydScAvUzOulnv59RLVM7ODsio6Cw,2076 +debugpy/_vendored/pydevd/_pydev_bundle/_pydev_tipper_common.py,sha256=hEt5I9k_kxxaJWwinODOEzA_je5uJovcN4iUEYrwXg0,1227 +debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__init__.py,sha256=X_j7-4Z4v6o5aSI9UW92eUWITNeM-qkvXrQbEOUJn7A,12704 +debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_bundle/pydev_console_utils.py,sha256=3jXJZ-tObnlWS5JnHhrI5eCBbtQnt-DcqYd1zsHgNZM,23769 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_import_hook.py,sha256=85M_hwEqlMuazvG5Nk8WFn3SpjDIQ0aO5ahAA7lPBsk,1322 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_imports.py,sha256=N6EMJg371s1SiA1wAFc_zBV9ZYbGBs0oFvTTvnW8q5Y,404 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console.py,sha256=P3tZYpjILr_jyITP39wXLwhQmmwfzVjcBEPOcG73BTc,3821 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console_011.py,sha256=P7WvuETdIVxIYZPdmd7c7vJ8DIZ0FCGVppGz9PIBcIE,21354 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_is_thread_alive.py,sha256=bOvk_nVIxgZH6EuUb4pkWh5L9JVZAw_IlQJVH75tL9o,696 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_localhost.py,sha256=7NTqaf3nAzfkbj1niqZ4xhgPzx9W9SDhgSkwqCazQIo,2070 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_log.py,sha256=9tOvuX0B_KslHXy_Ae9X-InE6IW8p0JWdvimw-WNpuA,7359 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey.py,sha256=VnCoDN9nmCRxYoLllEataitG5gBJj997-wweL2AHriM,40358 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey_qt.py,sha256=7FLTYfeF9YCmzxxki7zcbGt3ieWUVwYG4SkCNGjsEvk,7306 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_override.py,sha256=lL3tGSnQsziztbyePyrk9eu-xmgAWU2YQwxtv2t3to4,872 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_umd.py,sha256=0kfbdk22O6_wAN9b2dGRNyhJVsv2P2VZs9dJHh6Fxl8,6279 +debugpy/_vendored/pydevd/_pydev_bundle/pydev_versioncheck.py,sha256=VpGp1SZeDMPimt--5dq7LDfB6mKyPGRcAsQUCwuFHuw,510 +debugpy/_vendored/pydevd/_pydev_runfiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_coverage.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_nose.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel_client.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_pytest2.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_unittest.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_xml_rpc.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles.py,sha256=oKip8qMSr9rwIFgxuRY9mKRxZbLSkpQM6hk0tUDfNlA,31550 +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py,sha256=sWjARGt1-4SZXYDNVML20bWd3IBG5stPgs7Q3Cm4ub8,3499 
+debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_nose.py,sha256=wMCy67DodvPIwyADbrSkV3FGp1ZXyixJ4lWc0o_ugJU,7549 +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py,sha256=Qgn7uUlngUlvw21xCr9D_nj8oVNY_9MVPGzMYhsXKCs,9472 +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py,sha256=wwl8UBpGRSqTvw8VCdl2S7G0O-SyXtOmVFIPKz4oh9E,7722 +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py,sha256=Fny0ZPakVcx5ed0tqrtrR9WNor542br0lavONh2XnTs,9845 +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py,sha256=U1jbD9cvqx2gpeKV-XuYc0AoETohg31Jv1oDTudlu4M,6685 +debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py,sha256=BznpLOP0enjZ4aZus2nQvQbcoj0ThH0JJu-qBbZeXKA,10594 +debugpy/_vendored/pydevd/_pydevd_bundle/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevconsole_code.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info_regular.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_api.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_breakpoints.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_bytecode_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_code_to_source.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_collect_bytecode_info.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm_constants.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_command_line_handling.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_console.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_constants.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_custom_frames.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_cython_wrapper.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_daemon_thread.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_defaults.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace_files.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_exec2.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_api.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_filtering.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_gevent_integration.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_import_class.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_io.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_json_debug_options.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_json.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_xml.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_plugin_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command_json.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_referrers.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_reload.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_resolver.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_runpy.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_safe_repr.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_save_locals.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_signature.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_source_mapping.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_stackless.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_suspended_frames.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_thread_lifecycle.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_timeout.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_api.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch_regular.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_traceproperty.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_utils.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vars.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vm_type.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_xml.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py,sha256=7rcDvKv4OweHbJJVUIrVNaq7nRzc2NnbRCWY_wqGeXg,23085 +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__main__pydevd_gen_debug_adapter_protocol.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_base_schema.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema_log.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json,sha256=F6Myi84FtlbjechZbCpVsWRwB-Q_z5ixJn7vCJrYy3s,157930 +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocolCustom.json,sha256=NmKM_bN5s6ozUPXIFc_Q3w6unk6Zf1hb1pgyjvxv4QM,10616 
+debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_base_schema.py,sha256=Ke2Ff_8SRKGSDamE0eZxx8vAYNx4Ka9oJqRmNX5DfQA,3998 +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py,sha256=ViAsceebMsN4ZE8F3nwvcZN_0ABr8gkKdXEl_T5BFsM,763496 +debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema_log.py,sha256=aMICrBzLWCaynZy2TysEvz3_sdHXfrQlFBHJNKl7j7k,1255 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevconsole_code.py,sha256=m2z3GfCLJbbS0_V6_OSlOns21LruvVg1Zkdm2QYtS6Y,19014 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py,sha256=FP57omz4K2irhuf5zlRStGoFLglsw9mcwSS-isJ1S8I,1166 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py,sha256=VnRPfq6xsWsKBmzjMvCoS98lgt7LPUTJ-hW1KKssAgo,6239 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_api.py,sha256=Gc8nxECOJFcv5LXOiwB_lzjAcaJThtfpKL4b2saEE2U,50385 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_breakpoints.py,sha256=UAgyAAIiwqLxe-NswVieGiCpuDl2dvb_VUxg4jkZQGw,6010 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_bytecode_utils.py,sha256=Ub1XpTtxqx14P84WriKaEI40Qa6PW4x70Gsc8UJA4ng,26277 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_code_to_source.py,sha256=B30xbup05xjZrYBjA0xP9w5H72lLW7qO6U0jgJlMd7Q,17622 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_collect_bytecode_info.py,sha256=uTy9k8X6fyhnkDToA1ULezuf6Fk71JBmqMAwX8kMyG0,37141 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm.py,sha256=ZnIv-4kbKEDPw8nQYLqwIpaeWjCA_xsPEDJXcPz-owA,74656 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm_constants.py,sha256=S2aiyPQnH-uUdP1hitV2P2dqM9G8t3Ix2CWd3xPF084,6084 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_command_line_handling.py,sha256=wtaFx9j28k-zghFcxaV2Jo0nYF7eqC5VpBYJ5bK0VQ4,5906 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_concurrency_logger.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_thread_wrappers.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_concurrency_logger.py,sha256=nhYHTjhodOdBQ5ds2_kyyAF7kpqSvOjflPLacyx0SOw,16764 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_thread_wrappers.py,sha256=Ky_yDpWcMfVTOAA0uwyEgAvPQZS7apJCrMM4OAo6BO4,2039 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py,sha256=hIxX9k9ZNR7k9mi4Rtl-2ArzpTap3-hMrgUJgtFobmg,10179 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_constants.py,sha256=bmWgvvMCIb0jLY9Q_I6reIU4AFGZxH8H-qXLfpkQdTc,26450 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py,sha256=g69QtSQ5MjMlxARXbY95U3Q5t9REt0WPRk4pbvELB5k,4399 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c,sha256=7mGX5WNC-MEa5txmmi5KmxZmF_c7qGaRcGu-HeDbboI,2079167 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.cpython-38-x86_64-linux-gnu.so,sha256=TrdF7_lTLzk6PCDEO5SK7V9aMLT92JTKdlxUbR3IPfI,823216 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pxd,sha256=OzA3y2aGOd7ego0_ParOpQ5mzxj0iy38cNEVy1yKDco,1242 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pyx,sha256=KWGy90qFJueQByWOohvPbz9w-3AOD1k55F29vtbjCAE,91260 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py,sha256=hVfzhgMunBeuVkGl9qVnE6uMFFSX4DdhxPl0K2-Tcd4,1600 
+debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_daemon_thread.py,sha256=JYINrkA8Tz0K8VfMofsIy95ipQrBQrh03BKyFy3D1KA,7964 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_defaults.py,sha256=MPtBnGvKRn65IbEHftl0Y_KH0Yy_jXNjZuzEL8qEQmI,217 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace.py,sha256=vOYpH4i6tWq-DX8oCtW2UNu1BrI7Px-UysiXjLPxjUs,3567 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py,sha256=hHFDIIDFQ9EHi0lz5NSTJ4qGncT2XB2NhOg-rwWSeDU,5814 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_exec2.py,sha256=8q6dcEHDfJE6bdwNyrRFXa_vYOlTrHz3UVBtYzhnJJo,159 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py,sha256=uPT7-XMNe1snBXDzfuFH6fBGPe6xYuD5YEQI1uO-zvA,3288 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_utils.py,sha256=DZYt56rfRm_4wjSXG7cUSFqgJBUKvYOnpHPxIWWpQR4,2369 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_filtering.py,sha256=BvCxrTJt8r5cD1yJzRRalTaQ4TcO6KVbakv-ccb1z0o,12701 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame.py,sha256=ukELUyYIb41yTsOnV7APIELijHOuTP7A7woFqSzXWps,63187 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame_utils.py,sha256=jrasuK7WUbIobkAsnQBcDUZip6hKgoI80MOpqNkYpLo,8923 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_gevent_integration.py,sha256=a69rG88Vw94l5INDMsr9cokCHlmKIAgp88igQlkFEtU,3896 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_import_class.py,sha256=XigIMYbVg6IsNY3shHca-R26vGG5wMTGygdyh38vaU8,1838 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_io.py,sha256=Kkk-SrYQK8-oNZRbuiDOebRlhx0r8VblBwQgy2ndASs,8117 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_json_debug_options.py,sha256=25PzL7lFcGiek3-XB7lbTHZZ4uma6D9WuR3t8lbVHR4,5945 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command.py,sha256=wnKwGD2DcxSrTf_l5XsJPuUwgnnjzzdRoJPuHPyPFqg,4588 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_json.py,sha256=mmguB2K3SOnHMXM85lDOBdhq7wejc-FnsVorVzvIRlw,21328 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_xml.py,sha256=r_lxT5wgomSCUvGRsDEOlMwcC1jEwhgnirmTsFelfFg,22531 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_plugin_utils.py,sha256=5azIt1MGftKwB7Y5YTtvGPC9agQt7xM23zjvKqGjeig,2484 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command.py,sha256=mMW0WX-hL_hsKD9L9XYr2LKrmWz2slRbMXba1iCYh8c,35106 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command_json.py,sha256=tI_yJ4fRtxhhK3-uKtPzThQMOfq9a7Ecrmrt8KFd80k,55820 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_referrers.py,sha256=WVZbrenmO4WKVTMdBFEOfO7JjICIDURT-5EeCy-lu10,9756 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_reload.py,sha256=J4JylvGaK34_DhMNBpqD0YMWgUtWn9LkgShpxzQJ_iI,15773 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_resolver.py,sha256=CnO3Ps8XFNnFklnGQXDsihXnBdU_Lcq8qmsET3tAdsc,25500 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py,sha256=EMN-0GQ242o41a9PG_SakVQfox7K-FK2jSV4yPbcdMs,13521 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_safe_repr.py,sha256=_Z3ZTjJQbfg1zDflyp8k8fQLdR3J5L3e4ctHEH55Y2g,14554 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_save_locals.py,sha256=o-e7Xy0UF0CmsUcp1NZ0w3oHhiPhuzNh-XB1Qx-yg6I,3020 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_signature.py,sha256=cwiP2JIzmHrnyzDOL4dZcKRPWMmcCqFZ-Knwl4x8Ieg,6883 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_source_mapping.py,sha256=K8hROuOdVJhXTZGFDi5RP40uHDWK4ZWbG0ix0gNX_oc,6428 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_stackless.py,sha256=P9H6T_jIeukGbhY6TJ-BhPRty3neqP0CiAf65XC-5pg,16909 
+debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_suspended_frames.py,sha256=dIgMmo86Ra22UOdDaTwVqQm2EX6ISFYD1kxB500Kelg,20559 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_thread_lifecycle.py,sha256=I-U_-urNLaxavillhZFfWSEFdX7QMdgq0dMUtiOL29E,3408 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_timeout.py,sha256=V1-pX6yhik_aa1bk9ClkMsN5RaUXFGWRlHawtmTCSrA,8366 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_api.py,sha256=nlf56i9hKz6DikH5Txc_fwZxqN1gSV60Qps0ODWzc4g,1397 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py,sha256=bIDRhihoBHfcVU9jVlK2TuS-sEIoHEYejTkdHsSXagM,3265 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py,sha256=3Q_iAqFyoOd6qMcKg5Dvw8aFqqtRYQ7QJdffGGZmRAc,22202 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py,sha256=oY3O9UJwf8EIUngMLsJnOQAiyeuOt9Y2SEgm6MsMJ6o,3279 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_utils.py,sha256=8jRq4rUk6GYohDzIMNCweH1wxQLoT2zA-2_-o5Ktw4o,17289 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vars.py,sha256=q2oZGdLAG6zn7OZwLjHhiyT_SrI9Ht2TqoLCbMxFHgM,30786 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vm_type.py,sha256=osPhgiDRcU66KFUQffURSEt1GxLsS5B-DuV1ThHR_sY,1578 +debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_xml.py,sha256=aqVmbpO9F4vH6tpUhaloXJ8uWyG4NJhwoRof2H3Y-XI,14443 +debugpy/_vendored/pydevd/_pydevd_frame_eval/.gitignore,sha256=EKhbR-PpYtbvIYhIYeDDdUKDcU_kZf4kBzDgARh1Fgg,87 +debugpy/_vendored/pydevd/_pydevd_frame_eval/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_cython_wrapper.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_main.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_tracing.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_modify_bytecode.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py,sha256=fsjgGg07ahVCdIMe38savTyAEgQtEzb7vVIldjgLhfE,1343 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py,sha256=XzgU2jey-apeQd9NCV40YXOkiwsGJdYdrUSv_mdNu8U,2105 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c,sha256=4fh_u7VP3gA6Xsb_m46U5GoGBVUx3CXHnboAXGcboss,935795 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.cpython-38-x86_64-linux-gnu.so,sha256=JCtGtos5yRsfdNI_qslR9fozHCA51xTMVcwBSLjF8Jg,359872 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd,sha256=0pKrUK3YwQtlJk7vvhZzqnDCJvB6JOb4V23Zl8UrqYI,5324 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pyx,sha256=Bw14wOJlvGt6eDdhA5NbEcNAvIgM8hIux77p80LpiBM,31362 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.template.pyx,sha256=YOHqtbizcUtC_jMeBlNQ6FLQZn-CZ3fcoqChyHF4h4g,24550 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py,sha256=HkS8B9I0YbUVsHxRDfc6kYstUTlKc41HqZyD-N5ifQA,4219 +debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py,sha256=cTwAD7Y2HBwZGZWe-M6AnO-Fs5spFuHXIwue1R8RRIw,13545 +debugpy/_vendored/pydevd/_pydevd_frame_eval/release_mem.h,sha256=MbMCNJQXkcJZ8UU7xoH44MwqE3QRWZX5WCAz7zCju6Y,79 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/README.txt,sha256=jgSPsMO3Gc8ncNUe5RwdxdVB-YHyAioWMPXHcD6KbQE,700 
+debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/pydevd_fix_code.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/COPYING,sha256=baWkm-Te2LLURwK7TL0zOkMSVjVCU_ezvObHBo298Tk,1074 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/METADATA,sha256=9XadDK6YTQ-FPowYI5DS4ieA7hRGnRP_fM5Z9ioPkEQ,2929 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/RECORD,sha256=2udHTtpgQXukzLaj7MVfrJhBa40hV7SjP8vyZ5vNqMU,2995 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/direct_url.json,sha256=s58Rb4KXRlMKxk-mzpvr_tJRQ-Hx8-DHsU6NdohCnAg,93 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/top_level.txt,sha256=9BhdB7HqYZ-PvHNoWX6ilwLYWQqcgEOLwdb3aXm5Gys,9 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__init__.py,sha256=9hCplBAGV2ZNbI6TkkkC-Zefk_SxbesAVwe2iXtdSPQ,4152 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/bytecode.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/cfg.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/concrete.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/flags.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/instr.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/peephole_opt.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/bytecode.py,sha256=UACCPg0CuM9RIzNMJLqvLlCzeMBNI1UZ-WAspzO7rQM,6983 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/cfg.py,sha256=_ngtd6LTdF74Q7eM0kv-jY70Y-1m2dYOVTdL3LABi6U,15391 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/concrete.py,sha256=0CoEZ5sxVK6BLytJ1KI2qZgSb9-UAMkXwa1GiPaC3ag,22299 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/flags.py,sha256=ipQqlQuvHn_zwjWkaPlf_-LPTjOTcayut3T-sAijYQU,6020 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/instr.py,sha256=z5zgc6dJLWlaOxpmiErmR3qoWK-pbsqFQ5DnZhYxp9w,11721 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/peephole_opt.py,sha256=O7q19q0sDiwN4zVkFGdmceThKK4bQYP_13DFIbH8o8M,15740 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__init__.py,sha256=qlpJ7nivOajEtN7mIjg_wpiQPBgjnkNypVy1KdbmlEw,4996 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_bytecode.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_cfg.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_code.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_concrete.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_flags.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_instr.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_misc.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_peephole_opt.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/util_annotation.cpython-38.pyc,, +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_bytecode.py,sha256=oxR5LHyY7Mp8AnNd5gB9US1E2HLP1_ikKDZ9O1ybB9g,15909 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_cfg.py,sha256=rDlSsB9STCggvl79pv1q6uKUxElzat99z8_KcObcbb8,28547 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_code.py,sha256=5p98FDCpHG2GxUpoMnaw5S6SBakyeMqa5eX29nrOmuo,2425 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_concrete.py,sha256=HKFAg96iZKEjDMpaPwa2LrwERctdWUCTCcnvo-sKnEc,49634 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_flags.py,sha256=vBeWGBHWMvJ4yN7RdJKImSZurjuyeGQz7pHpeBBAKDI,6009 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_instr.py,sha256=hQ6EyqSddjJeUv4vhZFrEiy1xxMiqpyDuCK0xfKbgf8,11676 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_misc.py,sha256=z0K5O16SkAf81IljdDlKumS-x76HSrf5tnNGtsTLuIU,7149 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_peephole_opt.py,sha256=r6-xIPkRHhQlnTWXkt0sU0p0r1A80EgDKoFJTxE2J2A,32993 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/util_annotation.py,sha256=wKq6yPWrzkNlholl5Y10b3VjuCkoiYVgvcIjk_8jzf8,485 +debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/pydevd_fix_code.py,sha256=0IMNqViUb9NM5gPw9Nv9KUay99XkLoW4TnklupYkdDs,1801 +debugpy/_vendored/pydevd/pydev_app_engine_debug_startup.py,sha256=-gA1UJ8pRY3VE8bRc7JhWRmxlRanQ8QG3724O5ioeKA,691 +debugpy/_vendored/pydevd/pydev_coverage.py,sha256=qmd5XNE8Hwtem9m5eDtwbVIxi6U9XvtXIcur2xDP2Uk,3200 +debugpy/_vendored/pydevd/pydev_ipython/README,sha256=rvIWDUoNsPxITSg6EUu3L9DihmZUCwx68vQsqo_FSQg,538 +debugpy/_vendored/pydevd/pydev_ipython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhook.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookglut.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk3.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookpyglet.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt4.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt5.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhooktk.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookwx.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/pydev_ipython/__pycache__/matplotlibtools.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_for_kernel.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_loaders.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/__pycache__/version.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_ipython/inputhook.py,sha256=GjhfMC0wvhSGsDKTOTEOt0Gl6FleaS81yUn4AxGIlZY,19554 +debugpy/_vendored/pydevd/pydev_ipython/inputhookglut.py,sha256=J5QUoxuqZLwvb8nBjIkrmBr-pTULVLp86UsdmImhYY8,5675 +debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk.py,sha256=LDLg0tDXbmv-YoODReQKtOu5Aa3ECBr9HUaNJV1UYk0,1107 +debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk3.py,sha256=Sh382xr25ztKAQfChBNm4D-NC5UoY-Ho0LTIj3i4Wi8,1104 +debugpy/_vendored/pydevd/pydev_ipython/inputhookpyglet.py,sha256=8lVbOG3Lucf_bX97HLNHXbq-wekKs1BUh79rMmh_bYg,3255 +debugpy/_vendored/pydevd/pydev_ipython/inputhookqt4.py,sha256=DKOobEvH6bIkkvOF581Vj-rQXWSUU6f7cEc9fzXr3_g,7242 +debugpy/_vendored/pydevd/pydev_ipython/inputhookqt5.py,sha256=82p6mLMradWAyzHh9faHb_eQJRigIgaW74WRbNLaoFc,7289 +debugpy/_vendored/pydevd/pydev_ipython/inputhooktk.py,sha256=bW7hLVv2JOuP00TSeqIw9O3KKNVHtBBpE5bHASW-bSo,748 +debugpy/_vendored/pydevd/pydev_ipython/inputhookwx.py,sha256=h804ZBfWPLy5ITbQSkzwELkGO4BqZtZB2b9izZXcpQk,6517 +debugpy/_vendored/pydevd/pydev_ipython/matplotlibtools.py,sha256=bZSIYY09Cny96-NpxOdbmU0lueNEaCfIhJP87D-dbFc,5378 +debugpy/_vendored/pydevd/pydev_ipython/qt.py,sha256=Ley-7H_Fn40za6lJKmekC-r0uOTeOgnH6FIGUBaGqP8,785 +debugpy/_vendored/pydevd/pydev_ipython/qt_for_kernel.py,sha256=HF68WlXR08W6-4B3DjuF-5AbNEMLq-NXIVscoSUTEFc,3619 +debugpy/_vendored/pydevd/pydev_ipython/qt_loaders.py,sha256=ZxyWPGfCtdYbGrtJ49BkAWu7CEW6w38VjSnoeEzvWjM,8413 +debugpy/_vendored/pydevd/pydev_ipython/version.py,sha256=lLBSR8mtlF1eCzwwOejljchAyfSy0opD8b0w_QjR97Q,1227 +debugpy/_vendored/pydevd/pydev_pysrc.py,sha256=LKtwQyDYYry3lhL7YalgmehgWD82-NDpqQYYi1bTYj8,100 +debugpy/_vendored/pydevd/pydev_run_in_console.py,sha256=bnrvimUb2pj9Gtcu09qNmSQFGY-lAcu1Yz9M6IJsqGM,4709 +debugpy/_vendored/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt,sha256=hnkTAuxSFW_Tilgw0Bt1RVLrfGRE3hYjAmTPm1k-sc8,21 +debugpy/_vendored/pydevd/pydev_sitecustomize/__pycache__/sitecustomize.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydev_sitecustomize/sitecustomize.py,sha256=kHFVCILQngtbNTgiaTu0icWDh7hRzqIpMlASeULI8Wo,9473 +debugpy/_vendored/pydevd/pydevconsole.py,sha256=Y4a1Kq3Y8qug4Y6PjFnM46RbMcpsy5OxdTIBTBHaY6M,21094 +debugpy/_vendored/pydevd/pydevd.py,sha256=YnQuNplOhJYzHPpxS7an8eRDNqYBvgPLLiguY_xirWA,144860 +debugpy/_vendored/pydevd/pydevd_attach_to_process/README.txt,sha256=JibLodU4lzwvGyI8TNbfYhu626MPpYN3z4YAw82zTPU,960 +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_always_live_program.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_check.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process_linux.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/add_code_to_python_process.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_pydevd.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_script.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/pydevd_attach_to_process/_always_live_program.py,sha256=I8Cbq2TR04ferhN1DMds9agFTqF8RST6Pw6diDFHr6o,679 +debugpy/_vendored/pydevd/pydevd_attach_to_process/_check.py,sha256=9AE9SHJNK7bwNjIsaadqhfOM11tvgZm64KgDPFtLSDY,135 +debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process.py,sha256=kM9LFgzX_qQofDb_X0hYMSb8Qfoga-exKBlng3__KZw,297 +debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py,sha256=ATL8WvNcGPi3wTHtTQL23UhKxnbtG9dwB3MTZEiC-2E,2523 +debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py,sha256=-tJiQ4SRFwc5Qw6ZKdQzH2sCXwWp0PVzOFxyG7ZjFcg,22296 +debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_linux_amd64.so,sha256=IbEVxQj-FG3I-S68jUhVI0nhY__BfqEKrWL9kMx4yiI,22760 +debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_pydevd.py,sha256=EHK1vY0R36A-ZZen6qNZeuclG-EA2frLj1FGOoKLqno,2255 +debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_script.py,sha256=eyMzXuwxv8KYM4jFv3sKiRmlsx1nwxIfDd0V-B0JKhU,7834 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace.hpp,sha256=GCjlTIQW1wRoEpp1yCbeJiUaa9JDTbeOypJiZBRtxPE,8399 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_310.hpp,sha256=mNr6LVLPDoCRyxLPTdYb0JWDXSfRn7xuAzPOzZWvoFs,4062 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_311.hpp,sha256=_tpO9I0U0f2RqCYM8DIOPQJTLv8sL2NCxwKE2BnR0NE,4269 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_common.hpp,sha256=r1ch6UgwF4rxW8ehiNnAvJE18VCoUl2TujP7nTCy0vQ,1870 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_settrace.hpp,sha256=41ivhinwfCQmmYUDpEjaEMxCVFx6u--k2sBRO-cGcq0,7763 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_utils.hpp,sha256=hxR-qpxpXQTupO-AgnasBq1j69ztvTdFUF8xWuiEdWA,3811 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_version.hpp,sha256=tn11Wl2u0aLBj7Z0mcrYSCSOH6JrZ4e9P3jXSCZxySo,2617 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/python.h,sha256=ueIyBiClInxfKMlgAVMuxTvmhESadiWeA4uSwZAROeo,21631 +debugpy/_vendored/pydevd/pydevd_attach_to_process/common/ref_utils.hpp,sha256=8wDFQk9XoAnK2EdM4J-RErKfBYZn93uG6Rw5OCbLsA0,1475 +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/.gitignore,sha256=r3rDatBumb9cRDhx35hsdJp9URPUmjgkynAQViLIoR4,82 +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/__pycache__/lldb_prepare.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/attach.cpp,sha256=xG8NmOwfuhXN93spuG_uEfe0tOn32hnljCqY5f1z354,3703 +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_linux.sh,sha256=YbBXAMwXBEE6-jh4cldqPV9USk6rN_fOmGdYOJZsHC8,442 +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_mac.sh,sha256=Pr6_ZbfvyRx-kmGNedRSI06ZtZZPRYwPj3VThRrolKs,232 +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_manylinux.cmd,sha256=W93C4jG-fGn27rd1Yes3neP2upJTO9qAdKZPf8cvSQE,633 +debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/lldb_prepare.py,sha256=eSmL1KLLOrG8r4RJyVOd3USUjsplWXdKSCCnnGyGVdo,1691 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__init__.py,sha256=2VU5wHMC1RElLHJa5cwPVo6bK8sRDics9cFMtqx3mq4,7917 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/breakpoint.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/compat.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/crash.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/debug.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/disasm.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/event.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/interactive.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/module.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/process.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/registry.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/search.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/sql.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/system.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/textio.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/thread.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/util.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/window.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py,sha256=49N-AlYBS8S8ZC_lOXqQKq0ttdWOKjfmRpq8ESfNn84,168168 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/compat.py,sha256=0KEHUJM9HVPJkzEa_f6cWb6LB7ickr7xOqKvuOVjjeA,5230 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/crash.py,sha256=bAwe0hjMepdZkfIESfCJSxQJOnCxC7yp046nd6bJTI0,65394 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/debug.py,sha256=TI59UyIasBZF6iln8OMxtQCkIv4t6hRXqYaED8bRTzg,58709 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/disasm.py,sha256=e45vidMFkVl2s5nu2YJW4iE5Ng1KnWobNWBWnbaZskQ,24409 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/event.py,sha256=QzHPCcGvEZ1LrL33ddVegMsb6BURRgVBSi2nV20aOTs,67241 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/interactive.py,sha256=Nmm9hGyn5XnxFccS0vmLmHl25ReTXPxZSgIiWQCaPQg,83555 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/module.py,sha256=aQOXcKztzBzIhBNuEbSvaNZ-xtDERviWynRzbSVpesw,70615 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py,sha256=ViXvPwwEz2_EKrG-myicFCrtUjirrOmCon36AjeUI78,183635 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/registry.py,sha256=BmThLf5TaXsPEXzk16PqtZHIArZKpSK7f1srG8oFpJ4,21569 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py,sha256=NuUoyoU7lMZovibgZIjkTKtRsxWVuY3FRYfOJicM7-k,23798 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/sql.py,sha256=vHnOHabuFHMq4JBu7qf9O6omUrSTOsOGxdxjg81fvuw,34997 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/system.py,sha256=L5oZqyn68kkoDpFnOYJkaIdTjFle1_tBPwaTPfcKzvo,45884 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/textio.py,sha256=osUFHqxoZBdGQLmfvPXEFikvNPY_JbiQub5N2_tgee0,62691 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py,sha256=uEuXm8xSq4iwZNfwW6wXnrTvF4fpvkYTz--VQUYQ2mg,75478 
+debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/util.py,sha256=9GFD52PGOg0czJc0oUlpE8sTJVHZnmam84teq938xeg,36223 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py,sha256=LjPLQ0pv6rcHQmb32jV12bGUw_CTsi0atfNuOvkg2nc,2845 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/advapi32.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_amd64.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_i386.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/dbghelp.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/defines.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/gdi32.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/kernel32.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/ntdll.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/peb_teb.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/psapi.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shell32.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shlwapi.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/user32.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/version.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/wtsapi32.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py,sha256=2EFvqeuxUGiiDGTzkHMV4fEeDZvMxmrByD5hu9ClM8A,120809 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py,sha256=8eT1_GvmfpRI2HWWqdNDRJjA6TvpwVl8ZQcVvSZFCOY,25137 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py,sha256=CLD5RAQi686FfCs0bszEgh_ZVFIj-YrUEt8HRvHE5HE,16108 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py,sha256=6mZA5sDvGrIfP76-Jv6bvIYON3cjTruYt29Cxu2QKdQ,46705 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py,sha256=Verc00KnY3XSxI0KMpb81U4P6k6MUHCe-NgsxlG7Ie8,22799 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py,sha256=5i8RpG43wEMAYbcBjEwJaemrVhrlFwGKhkL947CR-7E,16829 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py,sha256=TeRsADg7eZN1dw7aGepbOIZobGmiKzddT-bKEZ97bQI,164818 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py,sha256=7PSunl1ixZo5y-bol-w53iE4GhKEPYv7KRiI9_CiOms,22847 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py,sha256=KjCX0Yte_g7ty240hehVBbadxmhI2M--bVu0SfhKS9E,159230 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py,sha256=V5n9HSLn0fv5qwe83zMjGGBcTeJ2woJzS_hdjGQhps4,13762 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py,sha256=T2MAPhKCqGfg_JxYGZkWChzlDtDsRAeczsL8fGhSYDA,14007 
+debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py,sha256=x7jV5_99GrZU0wlD-ePUeM09Uq_PJ7L1M9YwucIMUEw,25807 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py,sha256=-YYdVrMVUZWpzMIBUbeLkNlhQ7VCBhvjWLdZtN9u0Vo,57177 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py,sha256=uBtrPoubwMv_1CLSf42kpPQOiOrinCtYJkxWqzOl09I,36813 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py,sha256=P-4JIP38rtbQ-iHtdjSBst656mfBeFEBWPZyOrCD_c0,11164 +debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py,sha256=E7zOrJKXXm2OnRp_Xvt868_BwEVdh_39etqDQgZYLjQ,24309 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.cpp,sha256=qRJVdptEyvZtM70x0XmSkGhl3U28EYBZ9zCB2-GW3pE,27447 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.h,sha256=rWBA3kdzfyHaE9X9Diub3cojoJlXjC3TKnLQv-nGCeA,1846 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/compile_windows.bat,sha256=Ya3dZjibeVnkN-M3XX5SH_mLGYKTcpJCZSQlSxL6a6A,2074 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/inject_dll.cpp,sha256=GQmZbpNBRMMW1WFcDFOlJaGLy1-oZ4qCCIyo5exPLBM,4792 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/py_win_helpers.hpp,sha256=45pO-c1ofub4nn0XY9kMsTm3s48_EPE-VWAhld3704I,2479 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_in_memory.hpp,sha256=zvDC9cVGZ6BXEsz_Im-QMIubdw6vX3BCIg2T11cVfvg,3355 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_on_dllmain.cpp,sha256=ApQza8ZJrfe2U4jTJPa8ItzvnHc7w2G6Yrfq4BT_56g,2516 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.cpp,sha256=NO8qlc7sKU6oHA_AnXJSodHupZLizp3npBTc-EGpBj8,999 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.h,sha256=l0tEIxxiv0XrIeiJ2zedTd4fYrdlxVTK8ECpBk881WM,1162 +debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/targetver.h,sha256=fqw-iPopG_V-pFRxf33lVFL0uvWfDUB2ky0bibFblK0,1013 +debugpy/_vendored/pydevd/pydevd_file_utils.py,sha256=YkKjY_2YRtaitEsDw3JbM6DSUWrTm2K3HJRA586lvjk,35201 +debugpy/_vendored/pydevd/pydevd_plugins/__init__.py,sha256=3rZDMhSmpCE3y-ipjPjQrUv2CIm5ohCCprSJUoMrcqQ,294 +debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/django_debug.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/jinja2_debug.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/pydevd_line_validation.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/django_debug.py,sha256=VIBdRwe3Ia3m4LwdyCmnm93ybPXoOwXIE8HUCy9JMcw,22430 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/README.md,sha256=cxu8F295snUVNgqE5xXKnZTpbqbR3LTmm60pgK0IOTs,1183 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/__init__.py,sha256=rqtNsfDcKgQkcDt1wK2STXLrIH8Nv1NsWsQHe_mDZ30,227 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__init__.py,sha256=rqtNsfDcKgQkcDt1wK2STXLrIH8Nv1NsWsQHe_mDZ30,227 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/__init__.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_helpers.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_numpy_types.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_pandas_types.cpython-38.pyc,, 
+debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugins_django_form_str.cpython-38.pyc,, +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py,sha256=0fFR63gGOCVNWHp-e8El6OPlBlljTJwCe1oEJWXPv5M,639 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py,sha256=YKxpuCMgumbjcC94Gk91hy26obDMOlB5Z3CxmaR4960,2945 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_pandas_types.py,sha256=BHGHjYJwpQ0Gh1LG3bFSLh1FbRtWJVkVzYyCueLP870,5791 +debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py,sha256=quqrRUKuKEPoT37MBla7wJCSuWHio1dI1cEK1G13qO0,538 +debugpy/_vendored/pydevd/pydevd_plugins/jinja2_debug.py,sha256=z6XdAlP0Wf9PyluABWF2GICHnTfKaN3gRKvmuh6uymU,19141 +debugpy/_vendored/pydevd/pydevd_plugins/pydevd_line_validation.py,sha256=Kl48sEmnu7cn8P4GTflgsqhfOh3UUNCp_qfyjN3B8FI,6286 +debugpy/_vendored/pydevd/pydevd_tracing.py,sha256=wTEwlotgDDexiPcbX3N8lmwSK5MFxNE13VEMH1iKaB8,14804 +debugpy/_vendored/pydevd/setup_pydevd_cython.py,sha256=KcRQ-SPbua2b0AZEiFExT0PiQiVce7FSLsdWoqBBvAI,10410 +debugpy/_version.py,sha256=vCF3BLaoX0eSt1WiEnA6qRVGUVkGYn58lj6F3XhwJbA,497 +debugpy/adapter/__init__.py,sha256=ewTjlS3VAb6lypFWfGZjY7j2wiW6ApS3KSPJrm0xsEk,346 +debugpy/adapter/__main__.py,sha256=tgLZr7jDZEfZ_x6bIel2p5Q2PRDHxpF-D4pYE_LrwgU,8069 +debugpy/adapter/__pycache__/__init__.cpython-38.pyc,, +debugpy/adapter/__pycache__/__main__.cpython-38.pyc,, +debugpy/adapter/__pycache__/clients.cpython-38.pyc,, +debugpy/adapter/__pycache__/components.cpython-38.pyc,, +debugpy/adapter/__pycache__/launchers.cpython-38.pyc,, +debugpy/adapter/__pycache__/servers.cpython-38.pyc,, +debugpy/adapter/__pycache__/sessions.cpython-38.pyc,, +debugpy/adapter/clients.py,sha256=QOVtBAp9mzdLmZEI7wZPKihNk-NCa_uBW62dh6oAPYs,28343 +debugpy/adapter/components.py,sha256=UGZg2cTDq4oHi3OrJ5bW2zdFe5MAw-lTZ9lkjRbAwgM,6081 +debugpy/adapter/launchers.py,sha256=tuMd0Hs6ZQpUzIZRwaYbuRsP1vhLihG7Qt04Svdwy4A,6864 +debugpy/adapter/servers.py,sha256=9FP5bv9WtTeMtL5J4J0asSbpC1uUjr9JAUtStoPAxp8,21324 +debugpy/adapter/sessions.py,sha256=Uy9DVFcqNu_5ehEb5G7hTE6QVLCy3MnxrGPUQduhTQw,10889 +debugpy/common/__init__.py,sha256=PEfe5T2vJWqSQ9n5iXi5rWfRdOyK7rSz_LND5at64qc,592 +debugpy/common/__pycache__/__init__.cpython-38.pyc,, +debugpy/common/__pycache__/json.cpython-38.pyc,, +debugpy/common/__pycache__/log.cpython-38.pyc,, +debugpy/common/__pycache__/messaging.cpython-38.pyc,, +debugpy/common/__pycache__/singleton.cpython-38.pyc,, +debugpy/common/__pycache__/sockets.cpython-38.pyc,, +debugpy/common/__pycache__/stacks.cpython-38.pyc,, +debugpy/common/__pycache__/timestamp.cpython-38.pyc,, +debugpy/common/__pycache__/util.cpython-38.pyc,, +debugpy/common/json.py,sha256=NoinXsMZHybYGNiSbq3_OWPJxtmYbDew6RkqF3zMyZ8,9674 +debugpy/common/log.py,sha256=Z7UhRcCLTbX44WMfQaVQDu1WEnMMVJZ6JHIRbL0vii0,10681 +debugpy/common/messaging.py,sha256=pZ3fxB7mNaCBh89rRTRcirGvmfxDocF1Nx-XFT-iHJs,56396 +debugpy/common/singleton.py,sha256=bSTqWB9bLp1SpP1W9-LH_OlU9Arbd7pqu4OcjYKDQ_0,7666 +debugpy/common/sockets.py,sha256=EwHRSTFvUn0oBkWFUM1KnP2nesTdCK1QIMQuykmVjL8,3636 +debugpy/common/stacks.py,sha256=czZjqyY_5ntvOSpelZlJkpH4Gqq9JyZY7tcUqz4sWXA,1526 +debugpy/common/timestamp.py,sha256=ZocK6sWz2JUD1hBAKj672ta8D3ze0Z3zJD_CWjeDq7A,410 +debugpy/common/util.py,sha256=CPWCyS757aIcGISxf_SbaGlewSCFOgou0raEKfCZ56I,4646 +debugpy/launcher/__init__.py,sha256=L7aoLf-CaGikoiEJokF5JmSL0Y7FWIn2EOJFV89KbbY,890 
+debugpy/launcher/__main__.py,sha256=yLvc7PNl8YulPLINLZVBGY-38hClmkgec0LmkEQna_Q,3812 +debugpy/launcher/__pycache__/__init__.cpython-38.pyc,, +debugpy/launcher/__pycache__/__main__.cpython-38.pyc,, +debugpy/launcher/__pycache__/debuggee.cpython-38.pyc,, +debugpy/launcher/__pycache__/handlers.cpython-38.pyc,, +debugpy/launcher/__pycache__/output.cpython-38.pyc,, +debugpy/launcher/__pycache__/winapi.cpython-38.pyc,, +debugpy/launcher/debuggee.py,sha256=uUOkA8E-67FrrtOrarI_yEfDBsnn76opcNW1PmFxb9M,8574 +debugpy/launcher/handlers.py,sha256=vsMHp4SKqbR1ar27RW7t-SbApXdWPfhS7qLlgr_cw8g,5728 +debugpy/launcher/output.py,sha256=R8YWa7ccb9Sy_ighQqN9R5W1OdMNcnmizmTLYc6yTsg,3748 +debugpy/launcher/winapi.py,sha256=7ACn2Hxf8Kx5bBmxmuC-0A_hG60kEfrDtrZW_BnnjV4,3129 +debugpy/public_api.py,sha256=Om39_shVxVU1JnbZC-_sq4CPd3Fvpek4OcK-LnSPFyY,5887 +debugpy/server/__init__.py,sha256=mmPRoui4PkSeZxG3r5Gep6YB0MLMq_lIJru1pfGmKUY,323 +debugpy/server/__pycache__/__init__.cpython-38.pyc,, +debugpy/server/__pycache__/api.cpython-38.pyc,, +debugpy/server/__pycache__/attach_pid_injected.cpython-38.pyc,, +debugpy/server/__pycache__/cli.cpython-38.pyc,, +debugpy/server/api.py,sha256=oomBRxlPS8r8SedZdcCq7LSgyivnlCL-g5LVhaPX7bQ,10999 +debugpy/server/attach_pid_injected.py,sha256=mx8G8CDw5HGOPBM5XGkjkLka_LHVQJEuMJ4tUJHQqX8,2734 +debugpy/server/cli.py,sha256=-RTBelnNYN_IdnpqNLcU4ynZnf8RbOa0Ey-vLLjfk3Q,13289 diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/WHEEL new file mode 120000 index 0000000..ab72d5e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ea/37/78/88bd4f76838399cb1b5fce33e678db798aaf372d283d03104f797ce10a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/direct_url.json new file mode 100644 index 0000000..2c3cc6f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=daadab4403427abd090eccb38d8901afd8b393e01fd243048fab3f1d7132abb4"}, "url": "https://files.pythonhosted.org/packages/d2/e3/d0531ee73216d553d717bf4ac51dff297f89054619fa69db61eef028a07f/debugpy-1.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/top_level.txt new file mode 120000 index 0000000..f66d5ea --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy-1.6.3.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e8/e9/ba/25311aaa45a78fef7eee424e26bf7cf2c4ceea24ae88ae0df57e912e98 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/ThirdPartyNotices.txt b/venv/lib/python3.8/site-packages/debugpy/ThirdPartyNotices.txt new file mode 120000 index 0000000..036cf0b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/ThirdPartyNotices.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5b/39/93/f39dc19473ad904895ff1b64f7a89acd87950ca581dada8a32d726fe3a \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/debugpy/__init__.py b/venv/lib/python3.8/site-packages/debugpy/__init__.py new file mode 100644 index 0000000..975bec7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/__init__.py @@ -0,0 +1,38 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +"""An implementation of the Debug Adapter Protocol (DAP) for Python. + +https://microsoft.github.io/debug-adapter-protocol/ +""" + +# debugpy stable public API consists solely of members of this module that are +# enumerated below. +__all__ = [ # noqa + "__version__", + "breakpoint", + "configure", + "connect", + "debug_this_thread", + "is_client_connected", + "listen", + "log_to", + "trace_this_thread", + "wait_for_client", +] + +import sys + +assert sys.version_info >= (3, 7), ( + "Python 3.6 and below is not supported by this version of debugpy; " + "use debugpy 1.5.1 or earlier." +) + + +# Actual definitions are in a separate file to work around parsing issues causing +# SyntaxError on Python 2 and preventing the above version check from executing. +from debugpy.public_api import * # noqa +from debugpy.public_api import __version__ + +del sys diff --git a/venv/lib/python3.8/site-packages/debugpy/__main__.py b/venv/lib/python3.8/site-packages/debugpy/__main__.py new file mode 100644 index 0000000..24c2b7f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/__main__.py @@ -0,0 +1,39 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import sys + + +if __name__ == "__main__": + # debugpy can also be invoked directly rather than via -m. In this case, the first + # entry on sys.path is the one added automatically by Python for the directory + # containing this file. This means that import debugpy will not work, since we need + # the parent directory of debugpy/ to be in sys.path, rather than debugpy/ itself. + # + # The other issue is that many other absolute imports will break, because they + # will be resolved relative to debugpy/ - e.g. `import debugger` will then try + # to import debugpy/debugger.py. + # + # To fix both, we need to replace the automatically added entry such that it points + # at parent directory of debugpy/ instead of debugpy/ itself, import debugpy with that + # in sys.path, and then remove the first entry entry altogether, so that it doesn't + # affect any further imports we might do. For example, suppose the user did: + # + # python /foo/bar/debugpy ... + # + # At the beginning of this script, sys.path will contain "/foo/bar/debugpy" as the + # first entry. What we want is to replace it with "/foo/bar', then import debugpy + # with that in effect, and then remove the replaced entry before any more + # code runs. The imported debugpy module will remain in sys.modules, and thus all + # future imports of it or its submodules will resolve accordingly. + if "debugpy" not in sys.modules: + # Do not use dirname() to walk up - this can be a relative path, e.g. ".". 
+ sys.path[0] = sys.path[0] + "/../" + import debugpy # noqa + + del sys.path[0] + + from debugpy.server import cli + + cli.main() diff --git a/venv/lib/python3.8/site-packages/debugpy/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3ba7215 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/__pycache__/__main__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..09f4b09 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/__pycache__/_version.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/__pycache__/_version.cpython-38.pyc new file mode 100644 index 0000000..7605dd1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/__pycache__/_version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/__pycache__/public_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/__pycache__/public_api.cpython-38.pyc new file mode 100644 index 0000000..07d9706 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/__pycache__/public_api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/__init__.py new file mode 100644 index 0000000..daf9f90 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/__init__.py @@ -0,0 +1,126 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import contextlib +from importlib import import_module +import os +import sys + +from . import _util + + +VENDORED_ROOT = os.path.dirname(os.path.abspath(__file__)) +# TODO: Move the "pydevd" git submodule to the debugpy/_vendored directory +# and then drop the following fallback. +if "pydevd" not in os.listdir(VENDORED_ROOT): + VENDORED_ROOT = os.path.dirname(VENDORED_ROOT) + + +def list_all(resolve=False): + """Return the list of vendored projects.""" + # TODO: Derive from os.listdir(VENDORED_ROOT)? + projects = ["pydevd"] + if not resolve: + return projects + return [project_root(name) for name in projects] + + +def project_root(project): + """Return the path the root dir of the vendored project. + + If "project" is an empty string then the path prefix for vendored + projects (e.g. "debugpy/_vendored/") will be returned. + """ + if not project: + project = "" + return os.path.join(VENDORED_ROOT, project) + + +def iter_project_files(project, relative=False, **kwargs): + """Yield (dirname, basename, filename) for all files in the project.""" + if relative: + with _util.cwd(VENDORED_ROOT): + for result in _util.iter_all_files(project, **kwargs): + yield result + else: + root = project_root(project) + for result in _util.iter_all_files(root, **kwargs): + yield result + + +def iter_packaging_files(project): + """Yield the filenames for all files in the project. + + The filenames are relative to "debugpy/_vendored". This is most + useful for the "package data" in a setup.py. + """ + # TODO: Use default filters? __pycache__ and .pyc? 
+ prune_dir = None + exclude_file = None + try: + mod = import_module("._{}_packaging".format(project), __name__) + except ImportError: + pass + else: + prune_dir = getattr(mod, "prune_dir", prune_dir) + exclude_file = getattr(mod, "exclude_file", exclude_file) + results = iter_project_files( + project, relative=True, prune_dir=prune_dir, exclude_file=exclude_file + ) + for _, _, filename in results: + yield filename + + +def prefix_matcher(*prefixes): + """Return a module match func that matches any of the given prefixes.""" + assert prefixes + + def match(name, module): + for prefix in prefixes: + if name.startswith(prefix): + return True + else: + return False + + return match + + +def check_modules(project, match, root=None): + """Verify that only vendored modules have been imported.""" + if root is None: + root = project_root(project) + extensions = [] + unvendored = {} + for modname, mod in list(sys.modules.items()): + if not match(modname, mod): + continue + try: + filename = getattr(mod, "__file__", None) + except: # In theory it's possible that any error is raised when accessing __file__ + filename = None + if not filename: # extension module + extensions.append(modname) + elif not filename.startswith(root): + unvendored[modname] = filename + return unvendored, extensions + + +@contextlib.contextmanager +def vendored(project, root=None): + """A context manager under which the vendored project will be imported.""" + if root is None: + root = project_root(project) + # Add the vendored project directory, so that it gets tried first. + sys.path.insert(0, root) + try: + yield root + finally: + sys.path.remove(root) + + +def preimport(project, modules, **kwargs): + """Import each of the named modules out of the vendored project.""" + with vendored(project, **kwargs): + for name in modules: + import_module(name) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..9730f27 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/_pydevd_packaging.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/_pydevd_packaging.cpython-38.pyc new file mode 100644 index 0000000..58b62f4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/_pydevd_packaging.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/_util.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/_util.cpython-38.pyc new file mode 100644 index 0000000..93fe8c7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/_util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/force_pydevd.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/force_pydevd.cpython-38.pyc new file mode 100644 index 0000000..7e62e66 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/__pycache__/force_pydevd.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/_pydevd_packaging.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/_pydevd_packaging.py new file mode 100644 index 0000000..87cffd3 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/_pydevd_packaging.py @@ -0,0 +1,48 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from . import VENDORED_ROOT +from ._util import cwd, iter_all_files + + +INCLUDES = [ + 'setup_pydevd_cython.py', +] + + +def iter_files(): + # From the root of pydevd repo, we want only scripts and + # subdirectories that constitute the package itself (not helper + # scripts, tests etc). But when walking down into those + # subdirectories, we want everything below. + + with cwd(VENDORED_ROOT): + return iter_all_files('pydevd', prune_dir, exclude_file) + + +def prune_dir(dirname, basename): + if basename == '__pycache__': + return True + elif dirname != 'pydevd': + return False + elif basename.startswith('pydev'): + return False + elif basename.startswith('_pydev'): + return False + return True + + +def exclude_file(dirname, basename): + if dirname == 'pydevd': + if basename in INCLUDES: + return False + elif not basename.endswith('.py'): + return True + elif 'pydev' not in basename: + return True + return False + + if basename.endswith('.pyc'): + return True + return False diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/_util.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/_util.py new file mode 100644 index 0000000..7eab574 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/_util.py @@ -0,0 +1,59 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import contextlib +import os + + +@contextlib.contextmanager +def cwd(dirname): + """A context manager for operating in a different directory.""" + orig = os.getcwd() + os.chdir(dirname) + try: + yield orig + finally: + os.chdir(orig) + + +def iter_all_files(root, prune_dir=None, exclude_file=None): + """Yield (dirname, basename, filename) for each file in the tree. + + This is an alternative to os.walk() that flattens out the tree and + with filtering. + """ + pending = [root] + while pending: + dirname = pending.pop(0) + for result in _iter_files(dirname, pending, prune_dir, exclude_file): + yield result + + +def iter_tree(root, prune_dir=None, exclude_file=None): + """Yield (dirname, files) for each directory in the tree. + + The list of files is actually a list of (basename, filename). + + This is an alternative to os.walk() with filtering.""" + pending = [root] + while pending: + dirname = pending.pop(0) + files = [] + for _, b, f in _iter_files(dirname, pending, prune_dir, exclude_file): + files.append((b, f)) + yield dirname, files + + +def _iter_files(dirname, subdirs, prune_dir, exclude_file): + for basename in os.listdir(dirname): + filename = os.path.join(dirname, basename) + if os.path.isdir(filename): + if prune_dir is not None and prune_dir(dirname, basename): + continue + subdirs.append(filename) + else: + # TODO: Use os.path.isfile() to narrow it down? + if exclude_file is not None and exclude_file(dirname, basename): + continue + yield dirname, basename, filename diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/force_pydevd.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/force_pydevd.py new file mode 100644 index 0000000..1b9dd5d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/force_pydevd.py @@ -0,0 +1,62 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from importlib import import_module +import os +import warnings + +from . import check_modules, prefix_matcher, preimport, vendored + +# Ensure that pydevd is our vendored copy. +_unvendored, _ = check_modules('pydevd', + prefix_matcher('pydev', '_pydev')) +if _unvendored: + _unvendored = sorted(_unvendored.values()) + msg = 'incompatible copy of pydevd already imported' + # raise ImportError(msg) + warnings.warn(msg + ':\n {}'.format('\n '.join(_unvendored))) + +# If debugpy logging is enabled, enable it for pydevd as well +if "DEBUGPY_LOG_DIR" in os.environ: + os.environ[str("PYDEVD_DEBUG")] = str("True") + os.environ[str("PYDEVD_DEBUG_FILE")] = os.environ["DEBUGPY_LOG_DIR"] + str("/debugpy.pydevd.log") + +# Constants must be set before importing any other pydevd module +# # due to heavy use of "from" in them. +with vendored('pydevd'): + pydevd_constants = import_module('_pydevd_bundle.pydevd_constants') +# We limit representation size in our representation provider when needed. +pydevd_constants.MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 2 ** 32 + +# Now make sure all the top-level modules and packages in pydevd are +# loaded. Any pydevd modules that aren't loaded at this point, will +# be loaded using their parent package's __path__ (i.e. one of the +# following). +preimport('pydevd', [ + '_pydev_bundle', + '_pydev_runfiles', + '_pydevd_bundle', + '_pydevd_frame_eval', + 'pydev_ipython', + 'pydevd_plugins', + 'pydevd', +]) + +# When pydevd is imported it sets the breakpoint behavior, but it needs to be +# overridden because by default pydevd will connect to the remote debugger using +# its own custom protocol rather than DAP. +import pydevd # noqa +import debugpy # noqa + + +def debugpy_breakpointhook(): + debugpy.breakpoint() + + +pydevd.install_breakpointhook(debugpy_breakpointhook) + +# Ensure that pydevd uses JSON protocol +from _pydevd_bundle import pydevd_constants +from _pydevd_bundle import pydevd_defaults +pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = pydevd_constants.HTTP_JSON_PROTOCOL diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_app_engine_debug_startup.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_app_engine_debug_startup.cpython-38.pyc new file mode 100644 index 0000000..4f7c5a7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_app_engine_debug_startup.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_coverage.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_coverage.cpython-38.pyc new file mode 100644 index 0000000..e98c62f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_coverage.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_pysrc.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_pysrc.cpython-38.pyc new file mode 100644 index 0000000..93f387a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_pysrc.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_run_in_console.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_run_in_console.cpython-38.pyc new file mode 100644 index 0000000..f8af010 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydev_run_in_console.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevconsole.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevconsole.cpython-38.pyc new file mode 100644 index 0000000..4bb4e4c Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevconsole.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd.cpython-38.pyc new file mode 100644 index 0000000..8140954 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd_file_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd_file_utils.cpython-38.pyc new file mode 100644 index 0000000..eafa387 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd_file_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd_tracing.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd_tracing.cpython-38.pyc new file mode 100644 index 0000000..c3f4ef0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/pydevd_tracing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/setup_pydevd_cython.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/setup_pydevd_cython.cpython-38.pyc new file mode 100644 index 0000000..da2e0ba Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/__pycache__/setup_pydevd_cython.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..62245d4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_calltip_util.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_calltip_util.cpython-38.pyc new file mode 100644 index 0000000..19f03dd Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_calltip_util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_completer.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_completer.cpython-38.pyc new file mode 100644 index 0000000..3ccbb2d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_completer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_execfile.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_execfile.cpython-38.pyc new file mode 100644 index 0000000..5598a76 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_execfile.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_filesystem_encoding.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_filesystem_encoding.cpython-38.pyc new file mode 100644 index 0000000..d9ab4ef Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_filesystem_encoding.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_getopt.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_getopt.cpython-38.pyc new file mode 100644 index 0000000..2127ada Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_getopt.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_imports_tipper.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_imports_tipper.cpython-38.pyc new file mode 100644 index 0000000..1fdcb91 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_imports_tipper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_jy_imports_tipper.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_jy_imports_tipper.cpython-38.pyc new file mode 100644 index 0000000..cfa6eb9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_jy_imports_tipper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_log.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_log.cpython-38.pyc new file mode 100644 index 0000000..be17d0f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_log.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_saved_modules.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_saved_modules.cpython-38.pyc new file mode 100644 index 
0000000..b0813a2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_saved_modules.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_sys_patch.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_sys_patch.cpython-38.pyc new file mode 100644 index 0000000..debdeae Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_sys_patch.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_tipper_common.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_tipper_common.cpython-38.pyc new file mode 100644 index 0000000..b3ce6e6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/_pydev_tipper_common.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_console_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_console_utils.cpython-38.pyc new file mode 100644 index 0000000..19aee0a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_console_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_import_hook.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_import_hook.cpython-38.pyc new file mode 100644 index 0000000..35c2498 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_import_hook.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_imports.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_imports.cpython-38.pyc new file mode 100644 index 0000000..85eaa13 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_imports.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console.cpython-38.pyc new file mode 100644 index 0000000..3b18689 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console_011.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console_011.cpython-38.pyc new file mode 100644 index 0000000..53ecb2f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_ipython_console_011.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_is_thread_alive.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_is_thread_alive.cpython-38.pyc new file mode 100644 index 0000000..df1403c Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_is_thread_alive.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_localhost.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_localhost.cpython-38.pyc new file mode 100644 index 0000000..936a5ed Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_localhost.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_log.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_log.cpython-38.pyc new file mode 100644 index 0000000..59183a1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_log.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey.cpython-38.pyc new file mode 100644 index 0000000..ee8c76f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey_qt.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey_qt.cpython-38.pyc new file mode 100644 index 0000000..56ed67b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_monkey_qt.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_override.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_override.cpython-38.pyc new file mode 100644 index 0000000..ca28aa9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_override.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_umd.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_umd.cpython-38.pyc new file mode 100644 index 0000000..0be2194 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_umd.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_versioncheck.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_versioncheck.cpython-38.pyc new file mode 100644 index 0000000..57af757 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/__pycache__/pydev_versioncheck.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_calltip_util.py 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_calltip_util.py new file mode 100644 index 0000000..aca108f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_calltip_util.py @@ -0,0 +1,155 @@ +''' +License: Apache 2.0 +Author: Yuli Fitterman +''' +import types + +from _pydevd_bundle.pydevd_constants import IS_JYTHON + +try: + import inspect +except: + import traceback; + + traceback.print_exc() # Ok, no inspect available (search will not work) + +from _pydev_bundle._pydev_imports_tipper import signature_from_docstring + + +def is_bound_method(obj): + if isinstance(obj, types.MethodType): + return getattr(obj, '__self__', getattr(obj, 'im_self', None)) is not None + else: + return False + + +def get_class_name(instance): + return getattr(getattr(instance, "__class__", None), "__name__", None) + + +def get_bound_class_name(obj): + my_self = getattr(obj, '__self__', getattr(obj, 'im_self', None)) + if my_self is None: + return None + return get_class_name(my_self) + + +def get_description(obj): + try: + ob_call = obj.__call__ + except: + ob_call = None + + if isinstance(obj, type) or type(obj).__name__ == 'classobj': + fob = getattr(obj, '__init__', lambda: None) + if not isinstance(fob, (types.FunctionType, types.MethodType)): + fob = obj + elif is_bound_method(ob_call): + fob = ob_call + else: + fob = obj + + argspec = "" + fn_name = None + fn_class = None + if isinstance(fob, (types.FunctionType, types.MethodType)): + spec_info = inspect.getfullargspec(fob) + argspec = inspect.formatargspec(*spec_info) + fn_name = getattr(fob, '__name__', None) + if isinstance(obj, type) or type(obj).__name__ == 'classobj': + fn_name = "__init__" + fn_class = getattr(obj, "__name__", "UnknownClass") + elif is_bound_method(obj) or is_bound_method(ob_call): + fn_class = get_bound_class_name(obj) or "UnknownClass" + + else: + fn_name = getattr(fob, '__name__', None) + fn_self = getattr(fob, '__self__', None) + if fn_self is not None and not isinstance(fn_self, types.ModuleType): + fn_class = get_class_name(fn_self) + + doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj) + return create_method_stub(fn_name, fn_class, argspec, doc_string) + + +def create_method_stub(fn_name, fn_class, argspec, doc_string): + if fn_name and argspec: + doc_string = "" if doc_string is None else doc_string + fn_stub = create_function_stub(fn_name, argspec, doc_string, indent=1 if fn_class else 0) + if fn_class: + expr = fn_class if fn_name == '__init__' else fn_class + '().' 
+ fn_name + return create_class_stub(fn_class, fn_stub) + "\n" + expr + else: + expr = fn_name + return fn_stub + "\n" + expr + elif doc_string: + if fn_name: + restored_signature, _ = signature_from_docstring(doc_string, fn_name) + if restored_signature: + return create_method_stub(fn_name, fn_class, restored_signature, doc_string) + return create_function_stub('unknown', '(*args, **kwargs)', doc_string) + '\nunknown' + + else: + return '' + + +def get_docstring(obj): + if obj is not None: + try: + if IS_JYTHON: + # Jython + doc = obj.__doc__ + if doc is not None: + return doc + + from _pydev_bundle import _pydev_jy_imports_tipper + + is_method, infos = _pydev_jy_imports_tipper.ismethod(obj) + ret = '' + if is_method: + for info in infos: + ret += info.get_as_doc() + return ret + + else: + + doc = inspect.getdoc(obj) + if doc is not None: + return doc + except: + pass + else: + return '' + try: + # if no attempt succeeded, try to return repr()... + return repr(obj) + except: + try: + # otherwise the class + return str(obj.__class__) + except: + # if all fails, go to an empty string + return '' + + +def create_class_stub(class_name, contents): + return "class %s(object):\n%s" % (class_name, contents) + + +def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0): + + def shift_right(string, prefix): + return ''.join(prefix + line for line in string.splitlines(True)) + + fn_docstring = shift_right(inspect.cleandoc(fn_docstring), " " * (indent + 1)) + ret = ''' +def %s%s: + """%s""" + pass +''' % (fn_name, fn_argspec, fn_docstring) + ret = ret[1:] # remove first /n + ret = ret.replace('\t', " ") + if indent: + prefix = " " * indent + ret = shift_right(ret, prefix) + return ret diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_completer.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_completer.py new file mode 100644 index 0000000..ed0db4e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_completer.py @@ -0,0 +1,267 @@ +from collections import namedtuple +from string import ascii_letters, digits + +from _pydevd_bundle import pydevd_xml +import pydevconsole + +import builtins as __builtin__ # Py3 + +try: + import java.lang # @UnusedImport + from _pydev_bundle import _pydev_jy_imports_tipper + _pydev_imports_tipper = _pydev_jy_imports_tipper +except ImportError: + IS_JYTHON = False + from _pydev_bundle import _pydev_imports_tipper + +dir2 = _pydev_imports_tipper.generate_imports_tip_for_module + + +#======================================================================================================================= +# _StartsWithFilter +#======================================================================================================================= +class _StartsWithFilter: + ''' + Used because we can't create a lambda that'll use an outer scope in jython 2.1 + ''' + + def __init__(self, start_with): + self.start_with = start_with.lower() + + def __call__(self, name): + return name.lower().startswith(self.start_with) + + +#======================================================================================================================= +# Completer +# +# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev) +#======================================================================================================================= +class Completer: + + def __init__(self, namespace=None, 
global_namespace=None): + """Create a new completer for the command line. + + Completer([namespace,global_namespace]) -> completer instance. + + If unspecified, the default namespace where completions are performed + is __main__ (technically, __main__.__dict__). Namespaces should be + given as dictionaries. + + An optional second namespace can be given. This allows the completer + to handle cases where both the local and global scopes need to be + distinguished. + + Completer instances should be used as the completion mechanism of + readline via the set_completer() call: + + readline.set_completer(Completer(my_namespace).complete) + """ + + # Don't bind to namespace quite yet, but flag whether the user wants a + # specific namespace or to use __main__.__dict__. This will allow us + # to bind to __main__.__dict__ at completion time, not now. + if namespace is None: + self.use_main_ns = 1 + else: + self.use_main_ns = 0 + self.namespace = namespace + + # The global namespace, if given, can be bound directly + if global_namespace is None: + self.global_namespace = {} + else: + self.global_namespace = global_namespace + + def complete(self, text): + """Return the next possible completion for 'text'. + + This is called successively with state == 0, 1, 2, ... until it + returns None. The completion should begin with 'text'. + + """ + if self.use_main_ns: + # In pydev this option should never be used + raise RuntimeError('Namespace must be provided!') + self.namespace = __main__.__dict__ # @UndefinedVariable + + if "." in text: + return self.attr_matches(text) + else: + return self.global_matches(text) + + def global_matches(self, text): + """Compute matches when text is a simple name. + + Return a list of all keywords, built-in functions and names currently + defined in self.namespace or self.global_namespace that match. + + """ + + def get_item(obj, attr): + return obj[attr] + + a = {} + + for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]: # @UndefinedVariable + a.update(dict_with_comps) + + filter = _StartsWithFilter(text) + + return dir2(a, a.keys(), get_item, filter) + + def attr_matches(self, text): + """Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in self.namespace or self.global_namespace, it will be + evaluated and its attributes (as revealed by dir()) are used as + possible completions. (For class instances, class members are are + also considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ + import re + + # Another option, seems to work great. Catches things like ''. 
+ m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) # @UndefinedVariable + + if not m: + return [] + + expr, attr = m.group(1, 3) + try: + obj = eval(expr, self.namespace) + except: + try: + obj = eval(expr, self.global_namespace) + except: + return [] + + filter = _StartsWithFilter(attr) + + words = dir2(obj, filter=filter) + + return words + + +def generate_completions(frame, act_tok): + ''' + :return list(tuple(method_name, docstring, parameters, completion_type)) + + method_name: str + docstring: str + parameters: str -- i.e.: "(a, b)" + completion_type is an int + See: _pydev_bundle._pydev_imports_tipper for TYPE_ constants + ''' + if frame is None: + return [] + + # Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329 + # (Names not resolved in generator expression in method) + # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html + updated_globals = {} + updated_globals.update(frame.f_globals) + updated_globals.update(frame.f_locals) # locals later because it has precedence over the actual globals + + if pydevconsole.IPYTHON: + completions = pydevconsole.get_completions(act_tok, act_tok, updated_globals, frame.f_locals) + else: + completer = Completer(updated_globals, None) + # list(tuple(name, descr, parameters, type)) + completions = completer.complete(act_tok) + + return completions + + +def generate_completions_as_xml(frame, act_tok): + completions = generate_completions(frame, act_tok) + return completions_to_xml(completions) + + +def completions_to_xml(completions): + valid_xml = pydevd_xml.make_valid_xml_value + quote = pydevd_xml.quote + msg = [""] + + for comp in completions: + msg.append('') + msg.append("") + + return ''.join(msg) + + +identifier_start = ascii_letters + '_' +identifier_part = ascii_letters + '_' + digits + +identifier_start = set(identifier_start) +identifier_part = set(identifier_part) + + +def isidentifier(s): + return s.isidentifier() + + +TokenAndQualifier = namedtuple('TokenAndQualifier', 'token, qualifier') + + +def extract_token_and_qualifier(text, line=0, column=0): + ''' + Extracts the token a qualifier from the text given the line/colum + (see test_extract_token_and_qualifier for examples). + + :param unicode text: + :param int line: 0-based + :param int column: 0-based + ''' + # Note: not using the tokenize module because text should be unicode and + # line/column refer to the unicode text (otherwise we'd have to know + # those ranges after converted to bytes). + if line < 0: + line = 0 + if column < 0: + column = 0 + + if isinstance(text, bytes): + text = text.decode('utf-8') + + lines = text.splitlines() + try: + text = lines[line] + except IndexError: + return TokenAndQualifier(u'', u'') + + if column >= len(text): + column = len(text) + + text = text[:column] + token = u'' + qualifier = u'' + + temp_token = [] + for i in range(column - 1, -1, -1): + c = text[i] + if c in identifier_part or isidentifier(c) or c == u'.': + temp_token.append(c) + else: + break + temp_token = u''.join(reversed(temp_token)) + if u'.' 
in temp_token: + temp_token = temp_token.split(u'.') + token = u'.'.join(temp_token[:-1]) + qualifier = temp_token[-1] + else: + qualifier = temp_token + + return TokenAndQualifier(token, qualifier) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_execfile.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_execfile.py new file mode 100644 index 0000000..28ae403 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_execfile.py @@ -0,0 +1,14 @@ +# We must redefine it in Py3k if it's not already there +def execfile(file, glob=None, loc=None): + if glob is None: + import sys + glob = sys._getframe().f_back.f_globals + if loc is None: + loc = glob + + import tokenize + with tokenize.open(file) as stream: + contents = stream.read() + + # execute the script (note: it's important to compile first to have the filename set in debug mode) + exec(compile(contents + "\n", file, 'exec'), glob, loc) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py new file mode 120000 index 0000000..9e26cde --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/57/26/19/5e17accfeb7f7eb7069b39ba6f92b671cbfbc984535d671d6eac70a733 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_getopt.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_getopt.py new file mode 120000 index 0000000..abbdfe5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_getopt.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/61/8a/61/c4dc130f2683ef7b16ce75aa738d42970e6f1c5f39fab2002f01ea5c1d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_imports_tipper.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_imports_tipper.py new file mode 100644 index 0000000..7f89c75 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_imports_tipper.py @@ -0,0 +1,373 @@ +import inspect +import os.path +import sys + +from _pydev_bundle._pydev_tipper_common import do_find +from _pydevd_bundle.pydevd_utils import hasattr_checked, dir_checked + +from inspect import getfullargspec + + +def getargspec(*args, **kwargs): + arg_spec = getfullargspec(*args, **kwargs) + return arg_spec.args, arg_spec.varargs, arg_spec.varkw, arg_spec.defaults, arg_spec.kwonlyargs or [], arg_spec.kwonlydefaults or {} + + +# completion types. +TYPE_IMPORT = '0' +TYPE_CLASS = '1' +TYPE_FUNCTION = '2' +TYPE_ATTR = '3' +TYPE_BUILTIN = '4' +TYPE_PARAM = '5' + + +def _imp(name, log=None): + try: + return __import__(name) + except: + if '.' 
in name: + sub = name[0:name.rfind('.')] + + if log is not None: + log.add_content('Unable to import', name, 'trying with', sub) + log.add_exception() + + return _imp(sub, log) + else: + s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path) + if log is not None: + log.add_content(s) + log.add_exception() + + raise ImportError(s) + + +IS_IPY = False +if sys.platform == 'cli': + IS_IPY = True + _old_imp = _imp + + def _imp(name, log=None): + # We must add a reference in clr for .Net + import clr # @UnresolvedImport + initial_name = name + while '.' in name: + try: + clr.AddReference(name) + break # If it worked, that's OK. + except: + name = name[0:name.rfind('.')] + else: + try: + clr.AddReference(name) + except: + pass # That's OK (not dot net module). + + return _old_imp(initial_name, log) + + +def get_file(mod): + f = None + try: + f = inspect.getsourcefile(mod) or inspect.getfile(mod) + except: + try: + f = getattr(mod, '__file__', None) + except: + f = None + if f and f.lower(f[-4:]) in ['.pyc', '.pyo']: + filename = f[:-4] + '.py' + if os.path.exists(filename): + f = filename + + return f + + +def Find(name, log=None): + f = None + + mod = _imp(name, log) + parent = mod + foundAs = '' + + if inspect.ismodule(mod): + f = get_file(mod) + + components = name.split('.') + + old_comp = None + for comp in components[1:]: + try: + # this happens in the following case: + # we have mx.DateTime.mxDateTime.mxDateTime.pyd + # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd + mod = getattr(mod, comp) + except AttributeError: + if old_comp != comp: + raise + + if inspect.ismodule(mod): + f = get_file(mod) + else: + if len(foundAs) > 0: + foundAs = foundAs + '.' + foundAs = foundAs + comp + + old_comp = comp + + return f, mod, parent, foundAs + + +def search_definition(data): + '''@return file, line, col + ''' + + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + f, mod, parent, foundAs = Find(data) + try: + return do_find(f, mod), foundAs + except: + return do_find(f, parent), foundAs + + +def generate_tip(data, log=None): + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + + f, mod, parent, foundAs = Find(data, log) + # print_ >> open('temp.txt', 'w'), f + tips = generate_imports_tip_for_module(mod) + return f, tips + + +def check_char(c): + if c == '-' or c == '.': + return '_' + return c + + +_SENTINEL = object() + + +def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True): + ''' + @param obj_to_complete: the object from where we should get the completions + @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as kwonly_arg parameter + @param getattr: the way to get kwonly_arg given object from the obj_to_complete (used for the completer) + @param filter: kwonly_arg callable that receives the name and decides if it should be appended or not to the results + @return: list of tuples, so that each tuple represents kwonly_arg completion with: + name, doc, args, type (from the TYPE_* constants) + ''' + ret = [] + + if dir_comps is None: + dir_comps = dir_checked(obj_to_complete) + if hasattr_checked(obj_to_complete, '__dict__'): + dir_comps.append('__dict__') + if hasattr_checked(obj_to_complete, '__class__'): + dir_comps.append('__class__') + + get_complete_info = True + + if len(dir_comps) > 1000: + # ok, we don't want to let our users wait forever... + # no complete info for you... 
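Just below, each callable completion gets an args string assembled from getargspec (the wrapper over inspect.getfullargspec defined at the top of this module), with keyword-only arguments appended as name=default. A small, self-contained sketch of that formatting step (illustrative only; signature_text is a made-up name):

import inspect

def signature_text(fn):
    spec = inspect.getfullargspec(fn)
    parts = list(spec.args)
    defaults = spec.kwonlydefaults or {}
    # Keyword-only arguments are shown as "name=default" when a default exists.
    for name in spec.kwonlyargs:
        parts.append('%s=%s' % (name, defaults[name]) if name in defaults else name)
    return '(%s)' % ', '.join(parts)

def example(a, b=1, *, c=2, d):
    pass

print(signature_text(example))  # (a, b, c=2, d)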
+ + get_complete_info = False + + dontGetDocsOn = (float, int, str, tuple, list, dict) + dontGetattrOn = (dict, list, set, tuple) + for d in dir_comps: + + if d is None: + continue + + if not filter(d): + continue + + args = '' + + try: + try: + if isinstance(obj_to_complete, dontGetattrOn): + raise Exception('Since python 3.9, e.g. "dict[str]" will return' + " a dict that's only supposed to take strings. " + 'Interestingly, e.g. dict["val"] is also valid ' + 'and presumably represents a dict that only takes ' + 'keys that are "val". This breaks our check for ' + 'class attributes.') + obj = getattr(obj_to_complete.__class__, d) + except: + obj = getattr(obj_to_complete, d) + except: # just ignore and get it without additional info + ret.append((d, '', args, TYPE_BUILTIN)) + else: + + if get_complete_info: + try: + retType = TYPE_BUILTIN + + # check if we have to get docs + getDoc = True + for class_ in dontGetDocsOn: + + if isinstance(obj, class_): + getDoc = False + break + + doc = '' + if getDoc: + # no need to get this info... too many constants are defined and + # makes things much slower (passing all that through sockets takes quite some time) + try: + doc = inspect.getdoc(obj) + if doc is None: + doc = '' + except: # may happen on jython when checking java classes (so, just ignore it) + doc = '' + + if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj): + try: + args, vargs, kwargs, defaults, kwonly_args, kwonly_defaults = getargspec(obj) + + args = args[:] + + for kwonly_arg in kwonly_args: + default = kwonly_defaults.get(kwonly_arg, _SENTINEL) + if default is not _SENTINEL: + args.append('%s=%s' % (kwonly_arg, default)) + else: + args.append(str(kwonly_arg)) + + args = '(%s)' % (', '.join(args)) + except TypeError: + # ok, let's see if we can get the arguments from the doc + args, doc = signature_from_docstring(doc, getattr(obj, '__name__', None)) + + retType = TYPE_FUNCTION + + elif inspect.isclass(obj): + retType = TYPE_CLASS + + elif inspect.ismodule(obj): + retType = TYPE_IMPORT + + else: + retType = TYPE_ATTR + + # add token and doc to return - assure only strings. + ret.append((d, doc, args, retType)) + + except: # just ignore and get it without aditional info + ret.append((d, '', args, TYPE_BUILTIN)) + + else: # get_complete_info == False + if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj): + retType = TYPE_FUNCTION + + elif inspect.isclass(obj): + retType = TYPE_CLASS + + elif inspect.ismodule(obj): + retType = TYPE_IMPORT + + else: + retType = TYPE_ATTR + # ok, no complete info, let's try to do this as fast and clean as possible + # so, no docs for this kind of information, only the signatures + ret.append((d, '', str(args), retType)) + + return ret + + +def signature_from_docstring(doc, obj_name): + args = '()' + try: + found = False + if len(doc) > 0: + if IS_IPY: + # Handle case where we have the situation below + # sort(self, object cmp, object key) + # sort(self, object cmp, object key, bool reverse) + # sort(self) + # sort(self, object cmp) + + # Or: sort(self: list, cmp: object, key: object) + # sort(self: list, cmp: object, key: object, reverse: bool) + # sort(self: list) + # sort(self: list, cmp: object) + if obj_name: + name = obj_name + '(' + + # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line. 
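When getargspec fails (builtins without introspectable signatures), the code falls back to signature_from_docstring, which scrapes the "(...)" part out of the docstring's first line. A stripped-down sketch of that fallback with a made-up helper name; note that which builtins still carry a signature in their docstring varies by Python version:

def args_from_docstring(doc, name):
    # Some builtins document their call form on the first line,
    # e.g. "getattr(object, name[, default]) -> value.".
    lines = (doc or '').strip().splitlines()
    first = lines[0] if lines else ''
    if name and first.startswith(name + '(') and ')' in first:
        return first[len(name):first.index(')') + 1]
    return '(*args, **kwargs)'

print(args_from_docstring(getattr.__doc__, 'getattr'))  # (object, name[, default]) on CPython 3.8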
+ lines = doc.splitlines() + if len(lines) == 1: + c = doc.count(name) + if c > 1: + doc = ('\n' + name).join(doc.split(name)) + + major = '' + for line in doc.splitlines(): + if line.startswith(name) and line.endswith(')'): + if len(line) > len(major): + major = line + if major: + args = major[major.index('('):] + found = True + + if not found: + i = doc.find('->') + if i < 0: + i = doc.find('--') + if i < 0: + i = doc.find('\n') + if i < 0: + i = doc.find('\r') + + if i > 0: + s = doc[0:i] + s = s.strip() + + # let's see if we have a docstring in the first line + if s[-1] == ')': + start = s.find('(') + if start >= 0: + end = s.find('[') + if end <= 0: + end = s.find(')') + if end <= 0: + end = len(s) + + args = s[start:end] + if not args[-1] == ')': + args = args + ')' + + # now, get rid of unwanted chars + l = len(args) - 1 + r = [] + for i in range(len(args)): + if i == 0 or i == l: + r.append(args[i]) + else: + r.append(check_char(args[i])) + + args = ''.join(r) + + if IS_IPY: + if args.startswith('(self:'): + i = args.find(',') + if i >= 0: + args = '(self' + args[i:] + else: + args = '(self)' + i = args.find(')') + if i > 0: + args = args[:i + 1] + + except: + pass + return args, doc diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py new file mode 100644 index 0000000..a30c4d3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py @@ -0,0 +1,492 @@ +import traceback +from io import StringIO +from java.lang import StringBuffer # @UnresolvedImport +from java.lang import String # @UnresolvedImport +import java.lang # @UnresolvedImport +import sys +from _pydev_bundle._pydev_tipper_common import do_find + +from org.python.core import PyReflectedFunction # @UnresolvedImport + +from org.python import core # @UnresolvedImport +from org.python.core import PyClass # @UnresolvedImport + +# completion types. +TYPE_IMPORT = '0' +TYPE_CLASS = '1' +TYPE_FUNCTION = '2' +TYPE_ATTR = '3' +TYPE_BUILTIN = '4' +TYPE_PARAM = '5' + + +def _imp(name): + try: + return __import__(name) + except: + if '.' in name: + sub = name[0:name.rfind('.')] + return _imp(sub) + else: + s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path) + raise RuntimeError(s) + + +import java.util +_java_rt_file = getattr(java.util, '__file__', None) + + +def Find(name): + f = None + if name.startswith('__builtin__'): + if name == '__builtin__.str': + name = 'org.python.core.PyString' + elif name == '__builtin__.dict': + name = 'org.python.core.PyDictionary' + + mod = _imp(name) + parent = mod + foundAs = '' + + try: + f = getattr(mod, '__file__', None) + except: + f = None + + components = name.split('.') + old_comp = None + for comp in components[1:]: + try: + # this happens in the following case: + # we have mx.DateTime.mxDateTime.mxDateTime.pyd + # but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd + mod = getattr(mod, comp) + except AttributeError: + if old_comp != comp: + raise + + if hasattr(mod, '__file__'): + f = mod.__file__ + else: + if len(foundAs) > 0: + foundAs = foundAs + '.' + foundAs = foundAs + comp + + old_comp = comp + + if f is None and name.startswith('java.lang'): + # Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5). 
+ f = _java_rt_file + + if f is not None: + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + return f, mod, parent, foundAs + + +def format_param_class_name(paramClassName): + if paramClassName.startswith(''): + paramClassName = paramClassName[len(' + paramClassName = paramClassName.split('\'')[1] + except: + paramClassName = repr(paramTypesClass) # just in case something else happens... it will at least be visible + # if the parameter equals [C, it means it it a char array, so, let's change it + + a = format_param_class_name(paramClassName) + # a = a.replace('[]','Array') + # a = a.replace('Object', 'obj') + # a = a.replace('String', 's') + # a = a.replace('Integer', 'i') + # a = a.replace('Char', 'c') + # a = a.replace('Double', 'd') + args.append(a) # so we don't leave invalid code + + info = Info(name, args=args, ret=ret) + # print_ info.basic_as_str() + infos.append(info) + + return 1, infos + except Exception: + s = StringIO() + traceback.print_exc(file=s) + return 1, [Info(str('ERROR'), doc=s.getvalue())] + + return 0, None + + +def ismodule(mod): + # java modules... do we have other way to know that? + if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \ + and hasattr(mod, '__name__'): + return 1 + + return isinstance(mod, core.PyModule) + + +def dir_obj(obj): + ret = [] + found = java.util.HashMap() + original = obj + if hasattr(obj, '__class__'): + if obj.__class__ == java.lang.Class: + + # get info about superclasses + classes = [] + classes.append(obj) + try: + c = obj.getSuperclass() + except TypeError: + # may happen on jython when getting the java.lang.Class class + c = obj.getSuperclass(obj) + + while c != None: + classes.append(c) + c = c.getSuperclass() + + # get info about interfaces + interfs = [] + for obj in classes: + try: + interfs.extend(obj.getInterfaces()) + except TypeError: + interfs.extend(obj.getInterfaces(obj)) + classes.extend(interfs) + + # now is the time when we actually get info on the declared methods and fields + for obj in classes: + try: + declaredMethods = obj.getDeclaredMethods() + except TypeError: + declaredMethods = obj.getDeclaredMethods(obj) + + try: + declaredFields = obj.getDeclaredFields() + except TypeError: + declaredFields = obj.getDeclaredFields(obj) + + for i in range(len(declaredMethods)): + name = declaredMethods[i].getName() + ret.append(name) + found.put(name, 1) + + for i in range(len(declaredFields)): + name = declaredFields[i].getName() + ret.append(name) + found.put(name, 1) + + elif isclass(obj.__class__): + d = dir(obj.__class__) + for name in d: + ret.append(name) + found.put(name, 1) + + # this simple dir does not always get all the info, that's why we have the part before + # (e.g.: if we do a dir on String, some methods that are from other interfaces such as + # charAt don't appear) + d = dir(original) + for name in d: + if found.get(name) != 1: + ret.append(name) + + return ret + + +def format_arg(arg): + '''formats an argument to be shown + ''' + + s = str(arg) + dot = s.rfind('.') + if dot >= 0: + s = s[dot + 1:] + + s = s.replace(';', '') + s = s.replace('[]', 'Array') + if len(s) > 0: + c = s[0].lower() + s = c + s[1:] + + return s + + +def search_definition(data): + '''@return file, line, col + ''' + + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + f, mod, parent, foundAs = Find(data) + try: + return do_find(f, mod), foundAs + except: + return do_find(f, parent), foundAs + + +def 
generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True): + ''' + @param obj_to_complete: the object from where we should get the completions + @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter + @param getattr: the way to get a given object from the obj_to_complete (used for the completer) + @param filter: a callable that receives the name and decides if it should be appended or not to the results + @return: list of tuples, so that each tuple represents a completion with: + name, doc, args, type (from the TYPE_* constants) + ''' + ret = [] + + if dir_comps is None: + dir_comps = dir_obj(obj_to_complete) + + for d in dir_comps: + + if d is None: + continue + + if not filter(d): + continue + + args = '' + doc = '' + retType = TYPE_BUILTIN + + try: + obj = getattr(obj_to_complete, d) + except (AttributeError, java.lang.NoClassDefFoundError): + # jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if + # we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug) + # for that we need a custom classloader... we have references from it in the below places: + # + # http://mindprod.com/jgloss/classloader.html + # http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html + # http://freshmeat.net/articles/view/1643/ + # + # note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath + # before the run, everything goes fine. + # + # The code below ilustrates what I mean... + # + # import sys + # sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" ) + # + # import junit.framework + # print_ dir(junit.framework) #shows the TestCase class here + # + # import junit.framework.TestCase + # + # raises the error: + # Traceback (innermost last): + # File "", line 1, in ? + # ImportError: No module named TestCase + # + # whereas if we had added the jar to the classpath before, everything would be fine by now... + + ret.append((d, '', '', retType)) + # that's ok, private things cannot be gotten... + continue + else: + + isMet = ismethod(obj) + if isMet[0] and isMet[1]: + info = isMet[1][0] + try: + args, vargs, kwargs = info.args, info.varargs, info.kwargs + doc = info.get_as_doc() + r = '' + for a in (args): + if len(r) > 0: + r += ', ' + r += format_arg(a) + args = '(%s)' % (r) + except TypeError: + traceback.print_exc() + args = '()' + + retType = TYPE_FUNCTION + + elif isclass(obj): + retType = TYPE_CLASS + + elif ismodule(obj): + retType = TYPE_IMPORT + + # add token and doc to return - assure only strings. 
+ ret.append((d, doc, args, retType)) + + return ret + + +if __name__ == "__main__": + sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar') + sys.stdout.write('%s\n' % Find('junit.framework.TestCase')) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_log.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_log.py new file mode 100644 index 0000000..7328d62 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_log.py @@ -0,0 +1,24 @@ +import traceback +import sys +from io import StringIO + + +class Log: + + def __init__(self): + self._contents = [] + + def add_content(self, *content): + self._contents.append(' '.join(content)) + + def add_exception(self): + s = StringIO() + exc_info = sys.exc_info() + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s) + self._contents.append(s.getvalue()) + + def get_contents(self): + return '\n'.join(self._contents) + + def clear_log(self): + del self._contents[:] diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_saved_modules.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_saved_modules.py new file mode 100644 index 0000000..bcf5f9b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_saved_modules.py @@ -0,0 +1,110 @@ +import sys +import os + + +def find_in_pythonpath(module_name): + # Check all the occurrences where we could match the given module/package in the PYTHONPATH. + # + # This is a simplistic approach, but probably covers most of the cases we're interested in + # (i.e.: this may fail in more elaborate cases of import customization or .zip imports, but + # this should be rare in general). + found_at = [] + + parts = module_name.split('.') # split because we need to convert mod.name to mod/name + for path in sys.path: + target = os.path.join(path, *parts) + target_py = target + '.py' + if os.path.isdir(target): + found_at.append(target) + if os.path.exists(target_py): + found_at.append(target_py) + return found_at + + +class DebuggerInitializationError(Exception): + pass + + +class VerifyShadowedImport(object): + + def __init__(self, import_name): + self.import_name = import_name + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is not None: + if exc_type == DebuggerInitializationError: + return False # It's already an error we generated. + + # We couldn't even import it... + found_at = find_in_pythonpath(self.import_name) + + if len(found_at) <= 1: + # It wasn't found anywhere or there was just 1 occurrence. + # Let's just return to show the original error. + return False + + # We found more than 1 occurrence of the same module in the PYTHONPATH + # (the user module and the standard library module). + # Let's notify the user as it seems that the module was shadowed. + msg = self._generate_shadowed_import_message(found_at) + raise DebuggerInitializationError(msg) + + def _generate_shadowed_import_message(self, found_at): + msg = '''It was not possible to initialize the debugger due to a module name conflict. 
+ +i.e.: the module "%(import_name)s" could not be imported because it is shadowed by: +%(found_at)s +Please rename this file/folder so that the original module from the standard library can be imported.''' % { + 'import_name': self.import_name, 'found_at': found_at[0]} + + return msg + + def check(self, module, expected_attributes): + msg = '' + for expected_attribute in expected_attributes: + try: + getattr(module, expected_attribute) + except: + msg = self._generate_shadowed_import_message([module.__file__]) + break + + if msg: + raise DebuggerInitializationError(msg) + + +with VerifyShadowedImport('threading') as verify_shadowed: + import threading; verify_shadowed.check(threading, ['Thread', 'settrace', 'setprofile', 'Lock', 'RLock', 'current_thread']) + +with VerifyShadowedImport('time') as verify_shadowed: + import time; verify_shadowed.check(time, ['sleep', 'time', 'mktime']) + +with VerifyShadowedImport('socket') as verify_shadowed: + import socket; verify_shadowed.check(socket, ['socket', 'gethostname', 'getaddrinfo']) + +with VerifyShadowedImport('select') as verify_shadowed: + import select; verify_shadowed.check(select, ['select']) + +with VerifyShadowedImport('code') as verify_shadowed: + import code as _code; verify_shadowed.check(_code, ['compile_command', 'InteractiveInterpreter']) + +with VerifyShadowedImport('_thread') as verify_shadowed: + import _thread as thread; verify_shadowed.check(thread, ['start_new_thread', 'start_new', 'allocate_lock']) + +with VerifyShadowedImport('queue') as verify_shadowed: + import queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque']) + +with VerifyShadowedImport('xmlrpclib') as verify_shadowed: + import xmlrpc.client as xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server']) + +with VerifyShadowedImport('xmlrpc.server') as verify_shadowed: + import xmlrpc.server as xmlrpcserver; verify_shadowed.check(xmlrpcserver, ['SimpleXMLRPCServer']) + +with VerifyShadowedImport('http.server') as verify_shadowed: + import http.server as BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler']) + +# If set, this is a version of the threading.enumerate that doesn't have the patching to remove the pydevd threads. +# Note: as it can't be set during execution, don't import the name (import the module and access it through its name). 
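The shadowing check above condenses to: if "import X" could be satisfied by more than one entry on sys.path, a local file is probably shadowing the real module. A runnable sketch of that probe, mirroring find_in_pythonpath (possible_locations is a made-up name):

import os
import sys

def possible_locations(module_name):
    # Every sys.path entry containing a package directory or a module.py that
    # matches the dotted name is a candidate for the import.
    parts = module_name.split('.')
    hits = []
    for entry in sys.path:
        base = os.path.join(entry, *parts)
        if os.path.isdir(base):
            hits.append(base)
        if os.path.exists(base + '.py'):
            hits.append(base + '.py')
    return hits

locations = possible_locations('threading')
if len(locations) > 1:
    print('threading looks shadowed by:', locations[0])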
+pydevd_saved_threading_enumerate = None diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_sys_patch.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_sys_patch.py new file mode 100644 index 0000000..f506750 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_sys_patch.py @@ -0,0 +1,73 @@ +import sys + + +def patch_sys_module(): + + def patched_exc_info(fun): + + def pydev_debugger_exc_info(): + type, value, traceback = fun() + if type == ImportError: + # we should not show frame added by plugin_import call + if traceback and hasattr(traceback, "tb_next"): + return type, value, traceback.tb_next + return type, value, traceback + + return pydev_debugger_exc_info + + system_exc_info = sys.exc_info + sys.exc_info = patched_exc_info(system_exc_info) + if not hasattr(sys, "system_exc_info"): + sys.system_exc_info = system_exc_info + + +def patched_reload(orig_reload): + + def pydev_debugger_reload(module): + orig_reload(module) + if module.__name__ == "sys": + # if sys module was reloaded we should patch it again + patch_sys_module() + + return pydev_debugger_reload + + +def patch_reload(): + import builtins # Py3 + + if hasattr(builtins, "reload"): + sys.builtin_orig_reload = builtins.reload + builtins.reload = patched_reload(sys.builtin_orig_reload) # @UndefinedVariable + try: + import imp + sys.imp_orig_reload = imp.reload + imp.reload = patched_reload(sys.imp_orig_reload) # @UndefinedVariable + except: + pass + else: + try: + import importlib + sys.importlib_orig_reload = importlib.reload # @UndefinedVariable + importlib.reload = patched_reload(sys.importlib_orig_reload) # @UndefinedVariable + except: + pass + + del builtins + + +def cancel_patches_in_sys_module(): + sys.exc_info = sys.system_exc_info # @UndefinedVariable + import builtins # Py3 + + if hasattr(sys, "builtin_orig_reload"): + builtins.reload = sys.builtin_orig_reload + + if hasattr(sys, "imp_orig_reload"): + import imp + imp.reload = sys.imp_orig_reload + + if hasattr(sys, "importlib_orig_reload"): + import importlib + importlib.reload = sys.importlib_orig_reload + + del builtins diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_tipper_common.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_tipper_common.py new file mode 100644 index 0000000..d97e95d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/_pydev_tipper_common.py @@ -0,0 +1,52 @@ +import inspect +import re + + +def do_find(f, mod): + import linecache + if inspect.ismodule(mod): + return f, 0, 0 + + lines = linecache.getlines(f) + + if inspect.isclass(mod): + name = mod.__name__ + pat = re.compile(r'^\s*class\s*' + name + r'\b') + for i in range(len(lines)): + if pat.match(lines[i]): + return f, i, 0 + + return f, 0, 0 + + if inspect.ismethod(mod): + mod = mod.im_func + + if inspect.isfunction(mod): + try: + mod = mod.func_code + except AttributeError: + mod = mod.__code__ # python 3k + + if inspect.istraceback(mod): + mod = mod.tb_frame + + if inspect.isframe(mod): + mod = mod.f_code + + if inspect.iscode(mod): + if not hasattr(mod, 'co_filename'): + return None, 0, 0 + + if not hasattr(mod, 'co_firstlineno'): + return mod.co_filename, 0, 0 + + lnum = mod.co_firstlineno + pat = re.compile(r'^(\s*def\s)|(.*(? 
0: + if pat.match(lines[lnum]): + break + lnum -= 1 + + return f, lnum, 0 + + raise RuntimeError('Do not know about: ' + f + ' ' + str(mod)) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__init__.py new file mode 100644 index 0000000..fe6ed41 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__init__.py @@ -0,0 +1,353 @@ +''' +Sample usage to track changes in a thread. + + import threading + import time + watcher = fsnotify.Watcher() + watcher.accepted_file_extensions = {'.py', '.pyw'} + + # Configure target values to compute throttling. + # Note: internal sleep times will be updated based on + # profiling the actual application runtime to match + # those values. + + watcher.target_time_for_single_scan = 2. + watcher.target_time_for_notification = 4. + + watcher.set_tracked_paths([target_dir]) + + def start_watching(): # Called from thread + for change_enum, change_path in watcher.iter_changes(): + if change_enum == fsnotify.Change.added: + print('Added: ', change_path) + elif change_enum == fsnotify.Change.modified: + print('Modified: ', change_path) + elif change_enum == fsnotify.Change.deleted: + print('Deleted: ', change_path) + + t = threading.Thread(target=start_watching) + t.daemon = True + t.start() + + try: + ... + finally: + watcher.dispose() + + +Note: changes are only reported for files (added/modified/deleted), not directories. +''' +import threading +import sys +from os.path import basename +from _pydev_bundle import pydev_log +from os import scandir + +try: + from enum import IntEnum +except: + + class IntEnum(object): + pass + +import time + +__author__ = 'Fabio Zadrozny' +__email__ = 'fabiofz@gmail.com' +__version__ = '0.1.5' # Version here and in setup.py + + +class Change(IntEnum): + added = 1 + modified = 2 + deleted = 3 + + +class _SingleVisitInfo(object): + + def __init__(self): + self.count = 0 + self.visited_dirs = set() + self.file_to_mtime = {} + self.last_sleep_time = time.time() + + +class _PathWatcher(object): + ''' + Helper to watch a single path. + ''' + + def __init__(self, root_path, accept_directory, accept_file, single_visit_info, max_recursion_level, sleep_time=.0): + ''' + :type root_path: str + :type accept_directory: Callback[str, bool] + :type accept_file: Callback[str, bool] + :type max_recursion_level: int + :type sleep_time: float + ''' + self.accept_directory = accept_directory + self.accept_file = accept_file + self._max_recursion_level = max_recursion_level + + self._root_path = root_path + + # Initial sleep value for throttling, it'll be auto-updated based on the + # Watcher.target_time_for_single_scan. + self.sleep_time = sleep_time + + self.sleep_at_elapsed = 1. / 30. + + # When created, do the initial snapshot right away! 
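The _check/_check_dir pair that follows is a plain os.scandir walk recording an (st_mtime_ns, st_size) pair per accepted file, with recursion capped and sleeps injected for throttling. A stripped-down sketch of just the snapshot step (throttling omitted; names are illustrative):

import os

def snapshot(root, accept_file=lambda p: p.endswith('.py'), max_depth=10, _depth=0):
    # Map each accepted file to (mtime_ns, size); comparing two such maps is
    # what turns polling into added/modified/deleted events.
    files = {}
    if _depth > max_depth:
        return files
    try:
        with os.scandir(root) as entries:
            for entry in entries:
                if entry.is_dir():
                    files.update(snapshot(entry.path, accept_file, max_depth, _depth + 1))
                elif accept_file(entry.path):
                    st = entry.stat()
                    files[entry.path] = (st.st_mtime_ns, st.st_size)
    except OSError:
        pass  # Directory vanished mid-scan; the watcher ignores this too.
    return files

print(len(snapshot('.')), 'tracked files under the current directory')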
+ old_file_to_mtime = {} + self._check(single_visit_info, lambda _change: None, old_file_to_mtime) + + def __eq__(self, o): + if isinstance(o, _PathWatcher): + return self._root_path == o._root_path + + return False + + def __ne__(self, o): + return not self == o + + def __hash__(self): + return hash(self._root_path) + + def _check_dir(self, dir_path, single_visit_info, append_change, old_file_to_mtime, level): + # This is the actual poll loop + if dir_path in single_visit_info.visited_dirs or level > self._max_recursion_level: + return + single_visit_info.visited_dirs.add(dir_path) + try: + if isinstance(dir_path, bytes): + try: + dir_path = dir_path.decode(sys.getfilesystemencoding()) + except UnicodeDecodeError: + try: + dir_path = dir_path.decode('utf-8') + except UnicodeDecodeError: + return # Ignore if we can't deal with the path. + + new_files = single_visit_info.file_to_mtime + + for entry in scandir(dir_path): + single_visit_info.count += 1 + + # Throttle if needed inside the loop + # to avoid consuming too much CPU. + if single_visit_info.count % 300 == 0: + if self.sleep_time > 0: + t = time.time() + diff = t - single_visit_info.last_sleep_time + if diff > self.sleep_at_elapsed: + time.sleep(self.sleep_time) + single_visit_info.last_sleep_time = time.time() + + if entry.is_dir(): + if self.accept_directory(entry.path): + self._check_dir(entry.path, single_visit_info, append_change, old_file_to_mtime, level + 1) + + elif self.accept_file(entry.path): + stat = entry.stat() + mtime = (stat.st_mtime_ns, stat.st_size) + path = entry.path + new_files[path] = mtime + + old_mtime = old_file_to_mtime.pop(path, None) + if not old_mtime: + append_change((Change.added, path)) + elif old_mtime != mtime: + append_change((Change.modified, path)) + + except OSError: + pass # Directory was removed in the meanwhile. + + def _check(self, single_visit_info, append_change, old_file_to_mtime): + self._check_dir(self._root_path, single_visit_info, append_change, old_file_to_mtime, 0) + + +class Watcher(object): + + # By default (if accept_directory is not specified), these will be the + # ignored directories. + ignored_dirs = {u'.git', u'__pycache__', u'.idea', u'node_modules', u'.metadata'} + + # By default (if accept_file is not specified), these will be the + # accepted files. + accepted_file_extensions = () + + # Set to the target value for doing full scan of all files (adds a sleep inside the poll loop + # which processes files to reach the target time). + # Lower values will consume more CPU + # Set to 0.0 to have no sleeps (which will result in a higher cpu load). + target_time_for_single_scan = 2.0 + + # Set the target value from the start of one scan to the start of another scan (adds a + # sleep after a full poll is done to reach the target time). + # Lower values will consume more CPU. + # Set to 0.0 to have a new scan start right away without any sleeps. + target_time_for_notification = 4.0 + + # Set to True to print the time for a single poll through all the paths. + print_poll_time = False + + # This is the maximum recursion level. + max_recursion_level = 10 + + def __init__(self, accept_directory=None, accept_file=None): + ''' + :param Callable[str, bool] accept_directory: + Callable that returns whether a directory should be watched. + Note: if passed it'll override the `ignored_dirs` + + :param Callable[str, bool] accept_file: + Callable that returns whether a file should be watched. + Note: if passed it'll override the `accepted_file_extensions`. 
+ ''' + self._path_watchers = set() + self._disposed = threading.Event() + + if accept_directory is None: + accept_directory = lambda dir_path: basename(dir_path) not in self.ignored_dirs + if accept_file is None: + accept_file = lambda path_name: \ + not self.accepted_file_extensions or path_name.endswith(self.accepted_file_extensions) + self.accept_file = accept_file + self.accept_directory = accept_directory + self._single_visit_info = _SingleVisitInfo() + + @property + def accept_directory(self): + return self._accept_directory + + @accept_directory.setter + def accept_directory(self, accept_directory): + self._accept_directory = accept_directory + for path_watcher in self._path_watchers: + path_watcher.accept_directory = accept_directory + + @property + def accept_file(self): + return self._accept_file + + @accept_file.setter + def accept_file(self, accept_file): + self._accept_file = accept_file + for path_watcher in self._path_watchers: + path_watcher.accept_file = accept_file + + def dispose(self): + self._disposed.set() + + @property + def path_watchers(self): + return tuple(self._path_watchers) + + def set_tracked_paths(self, paths): + """ + Note: always resets all path trackers to track the passed paths. + """ + if not isinstance(paths, (list, tuple, set)): + paths = (paths,) + + # Sort by the path len so that the bigger paths come first (so, + # if there's any nesting we want the nested paths to be visited + # before the parent paths so that the max_recursion_level is correct). + paths = sorted(set(paths), key=lambda path:-len(path)) + path_watchers = set() + + self._single_visit_info = _SingleVisitInfo() + + initial_time = time.time() + for path in paths: + sleep_time = 0. # When collecting the first time, sleep_time should be 0! + path_watcher = _PathWatcher( + path, + self.accept_directory, + self.accept_file, + self._single_visit_info, + max_recursion_level=self.max_recursion_level, + sleep_time=sleep_time, + ) + + path_watchers.add(path_watcher) + + actual_time = (time.time() - initial_time) + + pydev_log.debug('Tracking the following paths for changes: %s', paths) + pydev_log.debug('Time to track: %.2fs', actual_time) + pydev_log.debug('Folders found: %s', len(self._single_visit_info.visited_dirs)) + pydev_log.debug('Files found: %s', len(self._single_visit_info.file_to_mtime)) + self._path_watchers = path_watchers + + def iter_changes(self): + ''' + Continuously provides changes (until dispose() is called). + + Changes provided are tuples with the Change enum and filesystem path. + + :rtype: Iterable[Tuple[Change, str]] + ''' + while not self._disposed.is_set(): + initial_time = time.time() + + old_visit_info = self._single_visit_info + old_file_to_mtime = old_visit_info.file_to_mtime + changes = [] + append_change = changes.append + + self._single_visit_info = single_visit_info = _SingleVisitInfo() + for path_watcher in self._path_watchers: + path_watcher._check(single_visit_info, append_change, old_file_to_mtime) + + # Note that we pop entries while visiting, so, what remained is what's deleted. 
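The pop-while-visiting trick noted above is what turns two snapshots into events: every path still present gets popped from the old map, so whatever remains afterwards must have been deleted. A self-contained sketch of that comparison (made-up names, same {path: (mtime_ns, size)} shape as above):

def diff_snapshots(old, new):
    old = dict(old)  # copy so the caller's snapshot is left untouched
    changes = []
    for path, stamp in new.items():
        previous = old.pop(path, None)
        if previous is None:
            changes.append(('added', path))
        elif previous != stamp:
            changes.append(('modified', path))
    # Whatever was not popped no longer exists in the new snapshot.
    changes.extend(('deleted', path) for path in old)
    return changes

print(diff_snapshots({'a.py': (1, 10), 'b.py': (2, 20)},
                     {'b.py': (3, 20), 'c.py': (4, 5)}))
# -> [('modified', 'b.py'), ('added', 'c.py'), ('deleted', 'a.py')]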
+ for entry in old_file_to_mtime: + append_change((Change.deleted, entry)) + + for change in changes: + yield change + + actual_time = (time.time() - initial_time) + if self.print_poll_time: + print('--- Total poll time: %.3fs' % actual_time) + + if actual_time > 0: + if self.target_time_for_single_scan <= 0.0: + for path_watcher in self._path_watchers: + path_watcher.sleep_time = 0.0 + else: + perc = self.target_time_for_single_scan / actual_time + + # Prevent from changing the values too much (go slowly into the right + # direction). + # (to prevent from cases where the user puts the machine on sleep and + # values become too skewed). + if perc > 2.: + perc = 2. + elif perc < 0.5: + perc = 0.5 + + for path_watcher in self._path_watchers: + if path_watcher.sleep_time <= 0.0: + path_watcher.sleep_time = 0.001 + new_sleep_time = path_watcher.sleep_time * perc + + # Prevent from changing the values too much (go slowly into the right + # direction). + # (to prevent from cases where the user puts the machine on sleep and + # values become too skewed). + diff_sleep_time = new_sleep_time - path_watcher.sleep_time + path_watcher.sleep_time += (diff_sleep_time / (3.0 * len(self._path_watchers))) + + if actual_time > 0: + self._disposed.wait(actual_time) + + if path_watcher.sleep_time < 0.001: + path_watcher.sleep_time = 0.001 + + # print('new sleep time: %s' % path_watcher.sleep_time) + + diff = self.target_time_for_notification - actual_time + if diff > 0.: + self._disposed.wait(diff) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..53d09b8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_console_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_console_utils.py new file mode 100644 index 0000000..5c87ac8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_console_utils.py @@ -0,0 +1,639 @@ +import os +import sys +import traceback +from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec +from _pydev_bundle._pydev_calltip_util import get_description +from _pydevd_bundle import pydevd_vars +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle.pydevd_constants import (IS_JYTHON, NEXT_VALUE_SEPARATOR, get_global_debugger, + silence_warnings_decorator) +from contextlib import contextmanager +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_utils import interrupt_main_thread + +from io import StringIO + + +# ======================================================================================================================= +# BaseStdIn +# ======================================================================================================================= +class BaseStdIn: + + def __init__(self, original_stdin=sys.stdin, *args, **kwargs): + try: + self.encoding = sys.stdin.encoding + except: + # Not sure if it's available in all Python versions... + pass + self.original_stdin = original_stdin + + try: + self.errors = sys.stdin.errors # Who knew? sys streams have an errors attribute! + except: + # Not sure if it's available in all Python versions... 
+ pass + + def readline(self, *args, **kwargs): + # sys.stderr.write('Cannot readline out of the console evaluation\n') -- don't show anything + # This could happen if the user had done input('enter number).<-- upon entering this, that message would appear, + # which is not something we want. + return '\n' + + def write(self, *args, **kwargs): + pass # not available StdIn (but it can be expected to be in the stream interface) + + def flush(self, *args, **kwargs): + pass # not available StdIn (but it can be expected to be in the stream interface) + + def read(self, *args, **kwargs): + # in the interactive interpreter, a read and a readline are the same. + return self.readline() + + def close(self, *args, **kwargs): + pass # expected in StdIn + + def __iter__(self): + # BaseStdIn would not be considered as Iterable in Python 3 without explicit `__iter__` implementation + return self.original_stdin.__iter__() + + def __getattr__(self, item): + # it's called if the attribute wasn't found + if hasattr(self.original_stdin, item): + return getattr(self.original_stdin, item) + raise AttributeError("%s has no attribute %s" % (self.original_stdin, item)) + + +# ======================================================================================================================= +# StdIn +# ======================================================================================================================= +class StdIn(BaseStdIn): + ''' + Object to be added to stdin (to emulate it as non-blocking while the next line arrives) + ''' + + def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin): + BaseStdIn.__init__(self, original_stdin) + self.interpreter = interpreter + self.client_port = client_port + self.host = host + + def readline(self, *args, **kwargs): + # Ok, callback into the client to get the new input + try: + server = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port)) + requested_input = server.RequestInput() + if not requested_input: + return '\n' # Yes, a readline must return something (otherwise we can get an EOFError on the input() call). + else: + # readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character). + requested_input += '\n' + return requested_input + except KeyboardInterrupt: + raise # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console + except: + return '\n' + + def close(self, *args, **kwargs): + pass # expected in StdIn + + +#======================================================================================================================= +# DebugConsoleStdIn +#======================================================================================================================= +class DebugConsoleStdIn(BaseStdIn): + ''' + Object to be added to stdin (to emulate it as non-blocking while the next line arrives) + ''' + + def __init__(self, py_db, original_stdin): + ''' + :param py_db: + If None, get_global_debugger() is used. 
+ ''' + BaseStdIn.__init__(self, original_stdin) + self._py_db = py_db + self._in_notification = 0 + + def __send_input_requested_message(self, is_started): + try: + py_db = self._py_db + if py_db is None: + py_db = get_global_debugger() + + if py_db is None: + return + + cmd = py_db.cmd_factory.make_input_requested_message(is_started) + py_db.writer.add_command(cmd) + except Exception: + pydev_log.exception() + + @contextmanager + def notify_input_requested(self): + self._in_notification += 1 + if self._in_notification == 1: + self.__send_input_requested_message(True) + try: + yield + finally: + self._in_notification -= 1 + if self._in_notification == 0: + self.__send_input_requested_message(False) + + def readline(self, *args, **kwargs): + with self.notify_input_requested(): + return self.original_stdin.readline(*args, **kwargs) + + def read(self, *args, **kwargs): + with self.notify_input_requested(): + return self.original_stdin.read(*args, **kwargs) + + +class CodeFragment: + + def __init__(self, text, is_single_line=True): + self.text = text + self.is_single_line = is_single_line + + def append(self, code_fragment): + self.text = self.text + "\n" + code_fragment.text + if not code_fragment.is_single_line: + self.is_single_line = False + + +# ======================================================================================================================= +# BaseInterpreterInterface +# ======================================================================================================================= +class BaseInterpreterInterface: + + def __init__(self, mainThread, connect_status_queue=None): + self.mainThread = mainThread + self.interruptable = False + self.exec_queue = _queue.Queue(0) + self.buffer = None + self.banner_shown = False + self.connect_status_queue = connect_status_queue + self.mpl_modules_for_patching = {} + self.init_mpl_modules_for_patching() + + def build_banner(self): + return 'print({0})\n'.format(repr(self.get_greeting_msg())) + + def get_greeting_msg(self): + return 'PyDev console: starting.\n' + + def init_mpl_modules_for_patching(self): + from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot + self.mpl_modules_for_patching = { + "matplotlib": lambda: activate_matplotlib(self.enableGui), + "matplotlib.pyplot": activate_pyplot, + "pylab": activate_pylab + } + + def need_more_for_code(self, source): + # PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations + + # Strangely even the IPython console is_complete said it was complete + # even with a continuation char at the end. + if source.endswith('\\'): + return True + + if hasattr(self.interpreter, 'is_complete'): + return not self.interpreter.is_complete(source) + try: + # At this point, it should always be single. + # If we don't do this, things as: + # + # for i in range(10): print(i) + # + # (in a single line) don't work. + # Note that it won't give an error and code will be None (so, it'll + # use execMultipleLines in the next call in this case). 
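The three cases the code below distinguishes map onto what the standard library's codeop.compile_command reports. A self-contained sketch of that three-way decision (a stand-in for illustration, not the console's own self.interpreter.compile path; needs_more_input is a made-up name):

import codeop

def needs_more_input(source):
    try:
        code = codeop.compile_command(source, '<input>', 'single')
    except (OverflowError, SyntaxError, ValueError):
        return False  # Case 1: invalid input, evaluate now and report the error.
    return code is None  # Case 2 (None): incomplete; Case 3: a complete statement.

print(needs_more_input('x = 1'))  # False: complete statement
print(needs_more_input('if x:'))  # True: the block body is still missing
print(needs_more_input('x = )'))  # False: a real syntax error, nothing to wait for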
+ symbol = 'single' + code = self.interpreter.compile(source, '', symbol) + except (OverflowError, SyntaxError, ValueError): + # Case 1 + return False + if code is None: + # Case 2 + return True + + # Case 3 + return False + + def need_more(self, code_fragment): + if self.buffer is None: + self.buffer = code_fragment + else: + self.buffer.append(code_fragment) + + return self.need_more_for_code(self.buffer.text) + + def create_std_in(self, debugger=None, original_std_in=None): + if debugger is None: + return StdIn(self, self.host, self.client_port, original_stdin=original_std_in) + else: + return DebugConsoleStdIn(py_db=debugger, original_stdin=original_std_in) + + def add_exec(self, code_fragment, debugger=None): + # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+ + # (showtraceback does it on python 3.5 onwards) + sys.excepthook = sys.__excepthook__ + try: + original_in = sys.stdin + try: + help = None + if 'pydoc' in sys.modules: + pydoc = sys.modules['pydoc'] # Don't import it if it still is not there. + + if hasattr(pydoc, 'help'): + # You never know how will the API be changed, so, let's code defensively here + help = pydoc.help + if not hasattr(help, 'input'): + help = None + except: + # Just ignore any error here + pass + + more = False + try: + sys.stdin = self.create_std_in(debugger, original_in) + try: + if help is not None: + # This will enable the help() function to work. + try: + try: + help.input = sys.stdin + except AttributeError: + help._input = sys.stdin + except: + help = None + if not self._input_error_printed: + self._input_error_printed = True + sys.stderr.write('\nError when trying to update pydoc.help.input\n') + sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n') + traceback.print_exc() + + try: + self.start_exec() + if hasattr(self, 'debugger'): + self.debugger.enable_tracing() + + more = self.do_add_exec(code_fragment) + + if hasattr(self, 'debugger'): + self.debugger.disable_tracing() + + self.finish_exec(more) + finally: + if help is not None: + try: + try: + help.input = original_in + except AttributeError: + help._input = original_in + except: + pass + + finally: + sys.stdin = original_in + except SystemExit: + raise + except: + traceback.print_exc() + finally: + sys.__excepthook__ = sys.excepthook + + return more + + def do_add_exec(self, codeFragment): + ''' + Subclasses should override. + + @return: more (True if more input is needed to complete the statement and False if the statement is complete). + ''' + raise NotImplementedError() + + def get_namespace(self): + ''' + Subclasses should override. + + @return: dict with namespace. + ''' + raise NotImplementedError() + + def __resolve_reference__(self, text): + """ + + :type text: str + """ + obj = None + if '.' 
not in text: + try: + obj = self.get_namespace()[text] + except KeyError: + pass + + if obj is None: + try: + obj = self.get_namespace()['__builtins__'][text] + except: + pass + + if obj is None: + try: + obj = getattr(self.get_namespace()['__builtins__'], text, None) + except: + pass + + else: + try: + last_dot = text.rindex('.') + parent_context = text[0:last_dot] + res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace()) + obj = getattr(res, text[last_dot + 1:]) + except: + pass + return obj + + def getDescription(self, text): + try: + obj = self.__resolve_reference__(text) + if obj is None: + return '' + return get_description(obj) + except: + return '' + + def do_exec_code(self, code, is_single_line): + try: + code_fragment = CodeFragment(code, is_single_line) + more = self.need_more(code_fragment) + if not more: + code_fragment = self.buffer + self.buffer = None + self.exec_queue.put(code_fragment) + + return more + except: + traceback.print_exc() + return False + + def execLine(self, line): + return self.do_exec_code(line, True) + + def execMultipleLines(self, lines): + if IS_JYTHON: + more = False + for line in lines.split('\n'): + more = self.do_exec_code(line, True) + return more + else: + return self.do_exec_code(lines, False) + + def interrupt(self): + self.buffer = None # Also clear the buffer when it's interrupted. + try: + if self.interruptable: + # Fix for #PyDev-500: Console interrupt can't interrupt on sleep + interrupt_main_thread(self.mainThread) + + self.finish_exec(False) + return True + except: + traceback.print_exc() + return False + + def close(self): + sys.exit(0) + + def start_exec(self): + self.interruptable = True + + def get_server(self): + if getattr(self, 'host', None) is not None: + return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port)) + else: + return None + + server = property(get_server) + + def ShowConsole(self): + server = self.get_server() + if server is not None: + server.ShowConsole() + + def finish_exec(self, more): + self.interruptable = False + + server = self.get_server() + + if server is not None: + return server.NotifyFinished(more) + else: + return True + + def getFrame(self): + xml = StringIO() + hidden_ns = self.get_ipython_hidden_vars_dict() + xml.write("") + xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns)) + xml.write("") + + return xml.getvalue() + + @silence_warnings_decorator + def getVariable(self, attributes): + xml = StringIO() + xml.write("") + val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes) + if val_dict is None: + val_dict = {} + + for k, val in val_dict.items(): + val = val_dict[k] + evaluate_full_value = pydevd_xml.should_evaluate_full_value(val) + xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value)) + + xml.write("") + + return xml.getvalue() + + def getArray(self, attr, roffset, coffset, rows, cols, format): + name = attr.split("\t")[-1] + array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace()) + return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format) + + def evaluate(self, expression): + xml = StringIO() + xml.write("") + result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace()) + xml.write(pydevd_vars.var_to_xml(result, expression)) + xml.write("") + return xml.getvalue() + + @silence_warnings_decorator + def loadFullValue(self, seq, scope_attrs): + """ + Evaluate full 
value for async Console variables in a separate thread and send results to IDE side + :param seq: id of command + :param scope_attrs: a sequence of variables with their attributes separated by NEXT_VALUE_SEPARATOR + (i.e.: obj\tattr1\tattr2NEXT_VALUE_SEPARATORobj2\attr1\tattr2) + :return: + """ + frame_variables = self.get_namespace() + var_objects = [] + vars = scope_attrs.split(NEXT_VALUE_SEPARATOR) + for var_attrs in vars: + if '\t' in var_attrs: + name, attrs = var_attrs.split('\t', 1) + + else: + name = var_attrs + attrs = None + if name in frame_variables: + var_object = pydevd_vars.resolve_var_object(frame_variables[name], attrs) + var_objects.append((var_object, name)) + else: + var_object = pydevd_vars.eval_in_context(name, frame_variables, frame_variables) + var_objects.append((var_object, name)) + + from _pydevd_bundle.pydevd_comm import GetValueAsyncThreadConsole + py_db = getattr(self, 'debugger', None) + + if py_db is None: + py_db = get_global_debugger() + + if py_db is None: + from pydevd import PyDB + py_db = PyDB() + + t = GetValueAsyncThreadConsole(py_db, self.get_server(), seq, var_objects) + t.start() + + def changeVariable(self, attr, value): + + def do_change_variable(): + Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace()) + + # Important: it has to be really enabled in the main thread, so, schedule + # it to run in the main thread. + self.exec_queue.put(do_change_variable) + + def connectToDebugger(self, debuggerPort, debugger_options=None): + ''' + Used to show console with variables connection. + Mainly, monkey-patches things in the debugger structure so that the debugger protocol works. + ''' + + if debugger_options is None: + debugger_options = {} + env_key = "PYDEVD_EXTRA_ENVS" + if env_key in debugger_options: + for (env_name, value) in debugger_options[env_key].items(): + existing_value = os.environ.get(env_name, None) + if existing_value: + os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value) + else: + os.environ[env_name] = value + if env_name == "PYTHONPATH": + sys.path.append(value) + + del debugger_options[env_key] + + def do_connect_to_debugger(): + try: + # Try to import the packages needed to attach the debugger + import pydevd + from _pydev_bundle._pydev_saved_modules import threading + except: + # This happens on Jython embedded in host eclipse + traceback.print_exc() + sys.stderr.write('pydevd is not available, cannot connect\n') + + from _pydevd_bundle.pydevd_constants import set_thread_id + from _pydev_bundle import pydev_localhost + set_thread_id(threading.current_thread(), "console_main") + + VIRTUAL_FRAME_ID = "1" # matches PyStackFrameConsole.java + VIRTUAL_CONSOLE_ID = "console_main" # matches PyThreadConsole.java + f = FakeFrame() + f.f_back = None + f.f_globals = {} # As globals=locals here, let's simply let it empty (and save a bit of network traffic). 
+ f.f_locals = self.get_namespace() + + self.debugger = pydevd.PyDB() + self.debugger.add_fake_frame(thread_id=VIRTUAL_CONSOLE_ID, frame_id=VIRTUAL_FRAME_ID, frame=f) + try: + pydevd.apply_debugger_options(debugger_options) + self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort) + self.debugger.prepare_to_run() + self.debugger.disable_tracing() + except: + traceback.print_exc() + sys.stderr.write('Failed to connect to target debugger.\n') + + # Register to process commands when idle + self.debugrunning = False + try: + import pydevconsole + pydevconsole.set_debug_hook(self.debugger.process_internal_commands) + except: + traceback.print_exc() + sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n') + + # Important: it has to be really enabled in the main thread, so, schedule + # it to run in the main thread. + self.exec_queue.put(do_connect_to_debugger) + + return ('connect complete',) + + def handshake(self): + if self.connect_status_queue is not None: + self.connect_status_queue.put(True) + return "PyCharm" + + def get_connect_status_queue(self): + return self.connect_status_queue + + def hello(self, input_str): + # Don't care what the input string is + return ("Hello eclipse",) + + def enableGui(self, guiname): + ''' Enable the GUI specified in guiname (see inputhook for list). + As with IPython, enabling multiple GUIs isn't an error, but + only the last one's main loop runs and it may not work + ''' + + def do_enable_gui(): + from _pydev_bundle.pydev_versioncheck import versionok_for_gui + if versionok_for_gui(): + try: + from pydev_ipython.inputhook import enable_gui + enable_gui(guiname) + except: + sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname) + traceback.print_exc() + elif guiname not in ['none', '', None]: + # Only print a warning if the guiname was going to do something + sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname) + # Return value does not matter, so return back what was sent + return guiname + + # Important: it has to be really enabled in the main thread, so, schedule + # it to run in the main thread. + self.exec_queue.put(do_enable_gui) + + def get_ipython_hidden_vars_dict(self): + return None + + +# ======================================================================================================================= +# FakeFrame +# ======================================================================================================================= +class FakeFrame: + ''' + Used to show console with variables connection. + A class to be used as a mock of a frame. 
+ ''' diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_import_hook.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_import_hook.py new file mode 100644 index 0000000..519d8d7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_import_hook.py @@ -0,0 +1,40 @@ + +import sys +import traceback +from types import ModuleType +from _pydevd_bundle.pydevd_constants import DebugInfoHolder + +import builtins + + +class ImportHookManager(ModuleType): + + def __init__(self, name, system_import): + ModuleType.__init__(self, name) + self._system_import = system_import + self._modules_to_patch = {} + + def add_module_name(self, module_name, activate_function): + self._modules_to_patch[module_name] = activate_function + + def do_import(self, name, *args, **kwargs): + module = self._system_import(name, *args, **kwargs) + try: + activate_func = self._modules_to_patch.pop(name, None) + if activate_func: + activate_func() # call activate function + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2: + traceback.print_exc() + + # Restore normal system importer to reduce performance impact + # of calling this method every time an import statement is invoked + if not self._modules_to_patch: + builtins.__import__ = self._system_import + + return module + + +import_hook_manager = ImportHookManager(__name__ + '.import_hook', builtins.__import__) +builtins.__import__ = import_hook_manager.do_import +sys.modules[import_hook_manager.__name__] = import_hook_manager diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_imports.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_imports.py new file mode 100644 index 0000000..edc2429 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_imports.py @@ -0,0 +1,13 @@ +from _pydev_bundle._pydev_saved_modules import xmlrpclib +from _pydev_bundle._pydev_saved_modules import xmlrpcserver + +SimpleXMLRPCServer = xmlrpcserver.SimpleXMLRPCServer + +from _pydev_bundle._pydev_execfile import execfile + +from _pydev_bundle._pydev_saved_modules import _queue + +from _pydevd_bundle.pydevd_exec2 import Exec + +from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console.py new file mode 100644 index 0000000..a1221f9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console.py @@ -0,0 +1,97 @@ +import sys +from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface + +import traceback + +# Uncomment to force PyDev standard shell. +# raise ImportError() + +from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend + + +#======================================================================================================================= +# InterpreterInterface +#======================================================================================================================= +class InterpreterInterface(BaseInterpreterInterface): + ''' + The methods in this class should be registered in the xml-rpc server. 
+ ''' + + def __init__(self, host, client_port, main_thread, show_banner=True, connect_status_queue=None): + BaseInterpreterInterface.__init__(self, main_thread, connect_status_queue) + self.client_port = client_port + self.host = host + self.interpreter = get_pydev_frontend(host, client_port) + self._input_error_printed = False + self.notification_succeeded = False + self.notification_tries = 0 + self.notification_max_tries = 3 + self.show_banner = show_banner + + self.notify_about_magic() + + def get_greeting_msg(self): + if self.show_banner: + self.interpreter.show_banner() + return self.interpreter.get_greeting_msg() + + def do_add_exec(self, code_fragment): + self.notify_about_magic() + if code_fragment.text.rstrip().endswith('??'): + print('IPython-->') + try: + res = bool(self.interpreter.add_exec(code_fragment.text)) + finally: + if code_fragment.text.rstrip().endswith('??'): + print('<--IPython') + + return res + + def get_namespace(self): + return self.interpreter.get_namespace() + + def getCompletions(self, text, act_tok): + return self.interpreter.getCompletions(text, act_tok) + + def close(self): + sys.exit(0) + + def notify_about_magic(self): + if not self.notification_succeeded: + self.notification_tries += 1 + if self.notification_tries > self.notification_max_tries: + return + completions = self.getCompletions("%", "%") + magic_commands = [x[0] for x in completions] + + server = self.get_server() + + if server is not None: + try: + server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic()) + self.notification_succeeded = True + except: + self.notification_succeeded = False + + def get_ipython_hidden_vars_dict(self): + try: + if hasattr(self.interpreter, 'ipython') and hasattr(self.interpreter.ipython, 'user_ns_hidden'): + user_ns_hidden = self.interpreter.ipython.user_ns_hidden + if isinstance(user_ns_hidden, dict): + # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values + user_hidden_dict = user_ns_hidden.copy() + else: + # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables + user_hidden_dict = dict([(key, val) for key, val in self.interpreter.ipython.user_ns.items() + if key in user_ns_hidden]) + + # while `_`, `__` and `___` were not initialized, they are not presented in `user_ns_hidden` + user_hidden_dict.setdefault('_', '') + user_hidden_dict.setdefault('__', '') + user_hidden_dict.setdefault('___', '') + + return user_hidden_dict + except: + # Getting IPython variables shouldn't break loading frame variables + traceback.print_exc() + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console_011.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console_011.py new file mode 100644 index 0000000..eaf4738 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console_011.py @@ -0,0 +1,516 @@ +# TODO that would make IPython integration better +# - show output other times then when enter was pressed +# - support proper exit to allow IPython to cleanup (e.g. 
temp files created with %edit) +# - support Ctrl-D (Ctrl-Z on Windows) +# - use IPython (numbered) prompts in PyDev +# - better integration of IPython and PyDev completions +# - some of the semantics on handling the code completion are not correct: +# eg: Start a line with % and then type c should give %cd as a completion by it doesn't +# however type %c and request completions and %cd is given as an option +# eg: Completing a magic when user typed it without the leading % causes the % to be inserted +# to the left of what should be the first colon. +"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend + for IPython 0.11 to 1.0+. +""" + +from __future__ import print_function + +import os +import sys +import codeop +import traceback + +from IPython.core.error import UsageError +from IPython.core.completer import IPCompleter +from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC +from IPython.core.usage import default_banner_parts +from IPython.utils.strdispatch import StrDispatch +import IPython.core.release as IPythonRelease +from IPython.terminal.interactiveshell import TerminalInteractiveShell +try: + from traitlets import CBool, Unicode +except ImportError: + from IPython.utils.traitlets import CBool, Unicode +from IPython.core import release + +from _pydev_bundle.pydev_imports import xmlrpclib + +default_pydev_banner_parts = default_banner_parts + +default_pydev_banner = ''.join(default_pydev_banner_parts) + + +def show_in_pager(self, strng, *args, **kwargs): + """ Run a string through pager """ + # On PyDev we just output the string, there are scroll bars in the console + # to handle "paging". This is the same behaviour as when TERM==dump (see + # page.py) + # for compatibility with mime-bundle form: + if isinstance(strng, dict): + strng = strng.get('text/plain', strng) + print(strng) + + +def create_editor_hook(pydev_host, pydev_client_port): + + def call_editor(filename, line=0, wait=True): + """ Open an editor in PyDev """ + if line is None: + line = 0 + + # Make sure to send an absolution path because unlike most editor hooks + # we don't launch a process. This is more like what happens in the zmqshell + filename = os.path.abspath(filename) + + # import sys + # sys.__stderr__.write('Calling editor at: %s:%s\n' % (pydev_host, pydev_client_port)) + + # Tell PyDev to open the editor + server = xmlrpclib.Server('http://%s:%s' % (pydev_host, pydev_client_port)) + server.IPythonEditor(filename, str(line)) + + if wait: + input("Press Enter when done editing:") + + return call_editor + + +class PyDevIPCompleter(IPCompleter): + + def __init__(self, *args, **kwargs): + """ Create a Completer that reuses the advanced completion support of PyDev + in addition to the completion support provided by IPython """ + IPCompleter.__init__(self, *args, **kwargs) + # Use PyDev for python matches, see getCompletions below + if self.python_matches in self.matchers: + # `self.python_matches` matches attributes or global python names + self.matchers.remove(self.python_matches) + + +class PyDevIPCompleter6(IPCompleter): + + def __init__(self, *args, **kwargs): + """ Create a Completer that reuses the advanced completion support of PyDev + in addition to the completion support provided by IPython """ + IPCompleter.__init__(self, *args, **kwargs) + + @property + def matchers(self): + """All active matcher routines for completion""" + # To remove python_matches we now have to override it as it's now a property in the superclass. 
+ return [ + self.file_matches, + self.magic_matches, + self.python_func_kw_matches, + self.dict_key_matches, + ] + + @matchers.setter + def matchers(self, value): + # To stop the init in IPCompleter raising an AttributeError we now have to specify a setter as it's now a property in the superclass. + return + + +class PyDevTerminalInteractiveShell(TerminalInteractiveShell): + banner1 = Unicode(default_pydev_banner, config=True, + help="""The part of the banner to be printed before the profile""" + ) + + # TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd) + # for now, just disable term_title + term_title = CBool(False) + + # Note in version 0.11 there is no guard in the IPython code about displaying a + # warning, so with 0.11 you get: + # WARNING: Readline services not available or not loaded. + # WARNING: The auto-indent feature requires the readline library + # Disable readline, readline type code is all handled by PyDev (on Java side) + readline_use = CBool(False) + # autoindent has no meaning in PyDev (PyDev always handles that on the Java side), + # and attempting to enable it will print a warning in the absence of readline. + autoindent = CBool(False) + # Force console to not give warning about color scheme choice and default to NoColor. + # TODO It would be nice to enable colors in PyDev but: + # - The PyDev Console (Eclipse Console) does not support the full range of colors, so the + # effect isn't as nice anyway at the command line + # - If done, the color scheme should default to LightBG, but actually be dependent on + # any settings the user has (such as if a dark theme is in use, then Linux is probably + # a better theme). + colors_force = CBool(True) + colors = Unicode("NoColor") + # Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and + # the `simple_prompt` flag is needed + simple_prompt = CBool(True) + + # In the PyDev Console, GUI control is done via hookable XML-RPC server + @staticmethod + def enable_gui(gui=None, app=None): + """Switch amongst GUI input hooks by name. + """ + # Deferred import + from pydev_ipython.inputhook import enable_gui as real_enable_gui + try: + return real_enable_gui(gui, app) + except ValueError as e: + raise UsageError("%s" % e) + + #------------------------------------------------------------------------- + # Things related to hooks + #------------------------------------------------------------------------- + + def init_history(self): + # Disable history so that we don't have an additional thread for that + # (and we don't use the history anyways). + self.config.HistoryManager.enabled = False + super(PyDevTerminalInteractiveShell, self).init_history() + + def init_hooks(self): + super(PyDevTerminalInteractiveShell, self).init_hooks() + self.set_hook('show_in_pager', show_in_pager) + + #------------------------------------------------------------------------- + # Things related to exceptions + #------------------------------------------------------------------------- + + def showtraceback(self, exc_tuple=None, *args, **kwargs): + # IPython does a lot of clever stuff with Exceptions. However mostly + # it is related to IPython running in a terminal instead of an IDE. + # (e.g. it prints out snippets of code around the stack trace) + # PyDev does a lot of clever stuff too, so leave exception handling + # with default print_exc that PyDev can parse and do its clever stuff + # with (e.g. 
it puts links back to the original source code) + try: + if exc_tuple is None: + etype, value, tb = sys.exc_info() + else: + etype, value, tb = exc_tuple + except ValueError: + return + + if tb is not None: + traceback.print_exception(etype, value, tb) + + #------------------------------------------------------------------------- + # Things related to text completion + #------------------------------------------------------------------------- + + # The way to construct an IPCompleter changed in most versions, + # so we have a custom, per version implementation of the construction + + def _new_completer_100(self): + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + alias_table=self.alias_manager.alias_table, + use_readline=self.has_readline, + parent=self, + ) + return completer + + def _new_completer_234(self): + # correct for IPython versions 2.x, 3.x, 4.x + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + use_readline=self.has_readline, + parent=self, + ) + return completer + + def _new_completer_500(self): + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + use_readline=False, + parent=self + ) + return completer + + def _new_completer_600(self): + completer = PyDevIPCompleter6(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + use_readline=False, + parent=self + ) + return completer + + def add_completer_hooks(self): + from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer + try: + from IPython.core.completerlib import reset_completer + except ImportError: + # reset_completer was added for rel-0.13 + reset_completer = None + self.configurables.append(self.Completer) + + # Add custom completers to the basic ones built into IPCompleter + sdisp = self.strdispatchers.get('complete_command', StrDispatch()) + self.strdispatchers['complete_command'] = sdisp + self.Completer.custom_completers = sdisp + + self.set_hook('complete_command', module_completer, str_key='import') + self.set_hook('complete_command', module_completer, str_key='from') + self.set_hook('complete_command', magic_run_completer, str_key='%run') + self.set_hook('complete_command', cd_completer, str_key='%cd') + if reset_completer: + self.set_hook('complete_command', reset_completer, str_key='%reset') + + def init_completer(self): + """Initialize the completion machinery. + + This creates a completer that provides the completions that are + IPython specific. We use this to supplement PyDev's core code + completions. + """ + # PyDev uses its own completer and custom hooks so that it uses + # most completions from PyDev's core completer which provides + # extra information. + # See getCompletions for where the two sets of results are merged + + if IPythonRelease._version_major >= 6: + self.Completer = self._new_completer_600() + elif IPythonRelease._version_major >= 5: + self.Completer = self._new_completer_500() + elif IPythonRelease._version_major >= 2: + self.Completer = self._new_completer_234() + elif IPythonRelease._version_major >= 1: + self.Completer = self._new_completer_100() + + if hasattr(self.Completer, 'use_jedi'): + self.Completer.use_jedi = False + + self.add_completer_hooks() + + if IPythonRelease._version_major <= 3: + # Only configure readline if we truly are using readline. 
IPython can + # do tab-completion over the network, in GUIs, etc, where readline + # itself may be absent + if self.has_readline: + self.set_readline_completer() + + #------------------------------------------------------------------------- + # Things related to aliases + #------------------------------------------------------------------------- + + def init_alias(self): + # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines + # ones we don't. So don't use super and instead go right to InteractiveShell + InteractiveShell.init_alias(self) + + #------------------------------------------------------------------------- + # Things related to exiting + #------------------------------------------------------------------------- + def ask_exit(self): + """ Ask the shell to exit. Can be overiden and used as a callback. """ + # TODO PyDev's console does not have support from the Python side to exit + # the console. If user forces the exit (with sys.exit()) then the console + # simply reports errors. e.g.: + # >>> import sys + # >>> sys.exit() + # Failed to create input stream: Connection refused + # >>> + # Console already exited with value: 0 while waiting for an answer. + # Error stream: + # Output stream: + # >>> + # + # Alternatively if you use the non-IPython shell this is what happens + # >>> exit() + # :None + # >>> + # :None + # >>> + # + super(PyDevTerminalInteractiveShell, self).ask_exit() + print('To exit the PyDev Console, terminate the console within IDE.') + + #------------------------------------------------------------------------- + # Things related to magics + #------------------------------------------------------------------------- + + def init_magics(self): + super(PyDevTerminalInteractiveShell, self).init_magics() + # TODO Any additional magics for PyDev? + + +InteractiveShellABC.register(PyDevTerminalInteractiveShell) # @UndefinedVariable + + +#======================================================================================================================= +# _PyDevFrontEnd +#======================================================================================================================= +class _PyDevFrontEnd: + + version = release.__version__ + + def __init__(self): + # Create and initialize our IPython instance. 
+ if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None: + self.ipython = PyDevTerminalInteractiveShell._instance + else: + self.ipython = PyDevTerminalInteractiveShell.instance() + + self._curr_exec_line = 0 + self._curr_exec_lines = [] + + def show_banner(self): + self.ipython.show_banner() + + def update(self, globals, locals): + ns = self.ipython.user_ns + + for key, value in list(ns.items()): + if key not in locals: + locals[key] = value + + self.ipython.user_global_ns.clear() + self.ipython.user_global_ns.update(globals) + self.ipython.user_ns = locals + + if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'): + self.ipython.history_manager.save_thread.pydev_do_not_trace = True # don't trace ipython history saving thread + + def complete(self, string): + try: + if string: + return self.ipython.complete(None, line=string, cursor_pos=string.__len__()) + else: + return self.ipython.complete(string, string, 0) + except: + # Silence completer exceptions + pass + + def is_complete(self, string): + # Based on IPython 0.10.1 + + if string in ('', '\n'): + # Prefiltering, eg through ipython0, may return an empty + # string although some operations have been accomplished. We + # thus want to consider an empty string as a complete + # statement. + return True + else: + try: + # Add line returns here, to make sure that the statement is + # complete (except if '\' was used). + # This should probably be done in a different place (like + # maybe 'prefilter_input' method? For now, this works. + clean_string = string.rstrip('\n') + if not clean_string.endswith('\\'): + clean_string += '\n\n' + + is_complete = codeop.compile_command( + clean_string, + "", + "exec" + ) + except Exception: + # XXX: Hack: return True so that the + # code gets executed and the error captured. + is_complete = True + return is_complete + + def getCompletions(self, text, act_tok): + # Get completions from IPython and from PyDev and merge the results + # IPython only gives context free list of completions, while PyDev + # gives detailed information about completions. + try: + TYPE_IPYTHON = '11' + TYPE_IPYTHON_MAGIC = '12' + _line, ipython_completions = self.complete(text) + + from _pydev_bundle._pydev_completer import Completer + completer = Completer(self.get_namespace(), None) + ret = completer.complete(act_tok) + append = ret.append + ip = self.ipython + pydev_completions = set([f[0] for f in ret]) + for ipython_completion in ipython_completions: + + # PyCharm was not expecting completions with '%'... + # Could be fixed in the backend, but it's probably better + # fixing it at PyCharm. 
+ # if ipython_completion.startswith('%'): + # ipython_completion = ipython_completion[1:] + + if ipython_completion not in pydev_completions: + pydev_completions.add(ipython_completion) + inf = ip.object_inspect(ipython_completion) + if inf['type_name'] == 'Magic function': + pydev_type = TYPE_IPYTHON_MAGIC + else: + pydev_type = TYPE_IPYTHON + pydev_doc = inf['docstring'] + if pydev_doc is None: + pydev_doc = '' + append((ipython_completion, pydev_doc, '', pydev_type)) + return ret + except: + import traceback;traceback.print_exc() + return [] + + def get_namespace(self): + return self.ipython.user_ns + + def clear_buffer(self): + del self._curr_exec_lines[:] + + def add_exec(self, line): + if self._curr_exec_lines: + self._curr_exec_lines.append(line) + + buf = '\n'.join(self._curr_exec_lines) + + if self.is_complete(buf): + self._curr_exec_line += 1 + self.ipython.run_cell(buf) + del self._curr_exec_lines[:] + return False # execute complete (no more) + + return True # needs more + else: + + if not self.is_complete(line): + # Did not execute + self._curr_exec_lines.append(line) + return True # needs more + else: + self._curr_exec_line += 1 + self.ipython.run_cell(line, store_history=True) + # hist = self.ipython.history_manager.output_hist_reprs + # rep = hist.get(self._curr_exec_line, None) + # if rep is not None: + # print(rep) + return False # execute complete (no more) + + def is_automagic(self): + return self.ipython.automagic + + def get_greeting_msg(self): + return 'PyDev console: using IPython %s\n' % self.version + + +class _PyDevFrontEndContainer: + _instance = None + _last_host_port = None + + +def get_pydev_frontend(pydev_host, pydev_client_port): + if _PyDevFrontEndContainer._instance is None: + _PyDevFrontEndContainer._instance = _PyDevFrontEnd() + + if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port): + _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port + + # Back channel to PyDev to open editors (in the future other + # info may go back this way. This is the same channel that is + # used to get stdin, see StdIn in pydev_console_utils) + _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port) + + # Note: setting the callback directly because setting it with set_hook would actually create a chain instead + # of ovewriting at each new call). + # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port)) + + return _PyDevFrontEndContainer._instance + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_is_thread_alive.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_is_thread_alive.py new file mode 100644 index 0000000..d949ba2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_is_thread_alive.py @@ -0,0 +1,23 @@ +from _pydev_bundle._pydev_saved_modules import threading + +# Hack for https://www.brainwy.com/tracker/PyDev/363 (i.e.: calling is_alive() can throw AssertionError under some +# circumstances). 
+# It is required to debug threads started by start_new_thread in Python 3.4 +_temp = threading.Thread() +if hasattr(_temp, '_is_stopped'): # Python 3.x has this + + def is_thread_alive(t): + return not t._is_stopped + +elif hasattr(_temp, '_Thread__stopped'): # Python 2.x has this + + def is_thread_alive(t): + return not t._Thread__stopped + +else: + + # Jython wraps a native java thread and thus only obeys the public API. + def is_thread_alive(t): + return t.is_alive() + +del _temp diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_localhost.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_localhost.py new file mode 100644 index 0000000..0d2838d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_localhost.py @@ -0,0 +1,67 @@ +from _pydev_bundle._pydev_saved_modules import socket +import sys + +IS_JYTHON = sys.platform.find('java') != -1 + +_cache = None + + +def get_localhost(): + ''' + Should return 127.0.0.1 in ipv4 and ::1 in ipv6 + + localhost is not used because on windows vista/windows 7, there can be issues where the resolving doesn't work + properly and takes a lot of time (had this issue on the pyunit server). + + Using the IP directly solves the problem. + ''' + # TODO: Needs better investigation! + + global _cache + if _cache is None: + try: + for addr_info in socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP): + config = addr_info[4] + if config[0] == '127.0.0.1': + _cache = '127.0.0.1' + return _cache + except: + # Ok, some versions of Python don't have getaddrinfo or SOL_TCP... Just consider it 127.0.0.1 in this case. + _cache = '127.0.0.1' + else: + _cache = 'localhost' + + return _cache + + +def get_socket_names(n_sockets, close=False): + socket_names = [] + sockets = [] + for _ in range(n_sockets): + if IS_JYTHON: + # Although the option which would be pure java *should* work for Jython, the socket being returned is still 0 + # (i.e.: it doesn't give the local port bound, only the original port, which was 0). 
+ from java.net import ServerSocket + sock = ServerSocket(0) + socket_name = get_localhost(), sock.getLocalPort() + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.bind((get_localhost(), 0)) + socket_name = sock.getsockname() + + sockets.append(sock) + socket_names.append(socket_name) + + if close: + for s in sockets: + s.close() + return socket_names + + +def get_socket_name(close=False): + return get_socket_names(1, close)[0] + + +if __name__ == '__main__': + print(get_socket_name()) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_log.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_log.py new file mode 100644 index 0000000..981b54b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_log.py @@ -0,0 +1,228 @@ +from _pydevd_bundle.pydevd_constants import DebugInfoHolder, SHOW_COMPILE_CYTHON_COMMAND_LINE, NULL, LOG_TIME +from contextlib import contextmanager +import traceback +import os +import sys + + +class _LoggingGlobals(object): + + _warn_once_map = {} + _debug_stream_filename = None + _debug_stream = sys.stderr + _debug_stream_initialized = False + + +def initialize_debug_stream(reinitialize=False): + ''' + :param bool reinitialize: + Reinitialize is used to update the debug stream after a fork (thus, if it wasn't + initialized, we don't need to do anything). + ''' + if reinitialize: + if not _LoggingGlobals._debug_stream_initialized: + return + else: + if _LoggingGlobals._debug_stream_initialized: + return + + _LoggingGlobals._debug_stream_initialized = True + + # Note: we cannot initialize with sys.stderr because when forking we may end up logging things in 'os' calls. + _LoggingGlobals._debug_stream = NULL + _LoggingGlobals._debug_stream_filename = None + + if not DebugInfoHolder.PYDEVD_DEBUG_FILE: + _LoggingGlobals._debug_stream = sys.stderr + else: + # Add pid to the filename. + try: + dirname = os.path.dirname(DebugInfoHolder.PYDEVD_DEBUG_FILE) + basename = os.path.basename(DebugInfoHolder.PYDEVD_DEBUG_FILE) + try: + os.makedirs(dirname) + except: + pass # Ignore error if it already exists. + + name, ext = os.path.splitext(basename) + debug_file = os.path.join(dirname, name + '.' + str(os.getpid()) + ext) + _LoggingGlobals._debug_stream = open(debug_file, 'w') + _LoggingGlobals._debug_stream_filename = debug_file + except: + _LoggingGlobals._debug_stream = sys.stderr + # Don't fail when trying to setup logging, just show the exception. + traceback.print_exc() + + +def list_log_files(pydevd_debug_file): + log_files = [] + dirname = os.path.dirname(pydevd_debug_file) + basename = os.path.basename(pydevd_debug_file) + if os.path.isdir(dirname): + name, ext = os.path.splitext(basename) + for f in os.listdir(dirname): + if f.startswith(name) and f.endswith(ext): + log_files.append(os.path.join(dirname, f)) + return log_files + + +@contextmanager +def log_context(trace_level, stream): + ''' + To be used to temporarily change the logging settings. 
+ ''' + original_trace_level = DebugInfoHolder.DEBUG_TRACE_LEVEL + original_debug_stream = _LoggingGlobals._debug_stream + original_pydevd_debug_file = DebugInfoHolder.PYDEVD_DEBUG_FILE + original_debug_stream_filename = _LoggingGlobals._debug_stream_filename + original_initialized = _LoggingGlobals._debug_stream_initialized + + DebugInfoHolder.DEBUG_TRACE_LEVEL = trace_level + _LoggingGlobals._debug_stream = stream + _LoggingGlobals._debug_stream_initialized = True + try: + yield + finally: + DebugInfoHolder.DEBUG_TRACE_LEVEL = original_trace_level + _LoggingGlobals._debug_stream = original_debug_stream + DebugInfoHolder.PYDEVD_DEBUG_FILE = original_pydevd_debug_file + _LoggingGlobals._debug_stream_filename = original_debug_stream_filename + _LoggingGlobals._debug_stream_initialized = original_initialized + + +import time +_last_log_time = time.time() + + +def _pydevd_log(level, msg, *args): + ''' + Levels are: + + 0 most serious warnings/errors (always printed) + 1 warnings/significant events + 2 informational trace + 3 verbose mode + ''' + if level <= DebugInfoHolder.DEBUG_TRACE_LEVEL: + # yes, we can have errors printing if the console of the program has been finished (and we're still trying to print something) + try: + try: + if args: + msg = msg % args + except: + msg = '%s - %s' % (msg, args) + + if LOG_TIME: + global _last_log_time + new_log_time = time.time() + time_diff = new_log_time - _last_log_time + _last_log_time = new_log_time + msg = '%.2fs - %s\n' % (time_diff, msg,) + else: + msg = '%s\n' % (msg,) + try: + try: + initialize_debug_stream() # Do it as late as possible + _LoggingGlobals._debug_stream.write(msg) + except TypeError: + if isinstance(msg, bytes): + # Depending on the StringIO flavor, it may only accept unicode. + msg = msg.decode('utf-8', 'replace') + _LoggingGlobals._debug_stream.write(msg) + except UnicodeEncodeError: + # When writing to the stream it's possible that the string can't be represented + # in the encoding expected (in this case, convert it to the stream encoding + # or ascii if we can't find one suitable using a suitable replace). + encoding = getattr(_LoggingGlobals._debug_stream, 'encoding', 'ascii') + msg = msg.encode(encoding, 'backslashreplace') + msg = msg.decode(encoding) + _LoggingGlobals._debug_stream.write(msg) + + _LoggingGlobals._debug_stream.flush() + except: + pass + return True + + +def _pydevd_log_exception(msg='', *args): + if msg or args: + _pydevd_log(0, msg, *args) + try: + initialize_debug_stream() # Do it as late as possible + traceback.print_exc(file=_LoggingGlobals._debug_stream) + _LoggingGlobals._debug_stream.flush() + except: + raise + + +def verbose(msg, *args): + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3: + _pydevd_log(3, msg, *args) + + +def debug(msg, *args): + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2: + _pydevd_log(2, msg, *args) + + +def info(msg, *args): + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + _pydevd_log(1, msg, *args) + + +warn = info + + +def critical(msg, *args): + _pydevd_log(0, msg, *args) + + +def exception(msg='', *args): + try: + _pydevd_log_exception(msg, *args) + except: + pass # Should never fail (even at interpreter shutdown). 
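+ # Illustrative usage sketch (not part of the vendored module): the helpers above are
+ # gated by DebugInfoHolder.DEBUG_TRACE_LEVEL, so callers elsewhere in pydevd typically
+ # use them along these lines (the functions named below are the ones defined in this file):
+ #
+ #     from _pydev_bundle import pydev_log
+ #     pydev_log.critical('always written (level 0)')
+ #     pydev_log.info('written when trace level >= 1: %s', 'detail')
+ #     pydev_log.debug('written when trace level >= 2')
+ #     pydev_log.verbose('written when trace level >= 3')
+ #     pydev_log.exception('logs the message plus the current traceback')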
+ + +error = exception + + +def error_once(msg, *args): + try: + if args: + message = msg % args + else: + message = str(msg) + except: + message = '%s - %s' % (msg, args) + + if message not in _LoggingGlobals._warn_once_map: + _LoggingGlobals._warn_once_map[message] = True + critical(message) + + +def exception_once(msg, *args): + try: + if args: + message = msg % args + else: + message = str(msg) + except: + message = '%s - %s' % (msg, args) + + if message not in _LoggingGlobals._warn_once_map: + _LoggingGlobals._warn_once_map[message] = True + exception(message) + + +def debug_once(msg, *args): + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3: + error_once(msg, *args) + + +def show_compile_cython_command_line(): + if SHOW_COMPILE_CYTHON_COMMAND_LINE: + dirname = os.path.dirname(os.path.dirname(__file__)) + error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build.", + sys.executable, os.path.join(dirname, 'setup_pydevd_cython.py')) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey.py new file mode 100644 index 0000000..150718f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey.py @@ -0,0 +1,1181 @@ +# License: EPL +import os +import re +import sys +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import get_global_debugger, IS_WINDOWS, IS_JYTHON, get_current_thread_id, \ + sorted_dict_repr +from _pydev_bundle import pydev_log +from contextlib import contextmanager +from _pydevd_bundle import pydevd_constants +from _pydevd_bundle.pydevd_defaults import PydevdCustomization +import ast + +try: + from pathlib import Path +except ImportError: + Path = None + +#=============================================================================== +# Things that are dependent on having the pydevd debugger +#=============================================================================== + +pydev_src_dir = os.path.dirname(os.path.dirname(__file__)) + +_arg_patch = threading.local() + + +@contextmanager +def skip_subprocess_arg_patch(): + _arg_patch.apply_arg_patching = False + try: + yield + finally: + _arg_patch.apply_arg_patching = True + + +def _get_apply_arg_patching(): + return getattr(_arg_patch, 'apply_arg_patching', True) + + +def _get_setup_updated_with_protocol_and_ppid(setup, is_exec=False): + if setup is None: + setup = {} + setup = setup.copy() + # Discard anything related to the protocol (we'll set the the protocol based on the one + # currently set). + setup.pop(pydevd_constants.ARGUMENT_HTTP_JSON_PROTOCOL, None) + setup.pop(pydevd_constants.ARGUMENT_JSON_PROTOCOL, None) + setup.pop(pydevd_constants.ARGUMENT_QUOTED_LINE_PROTOCOL, None) + + if not is_exec: + # i.e.: The ppid for the subprocess is the current pid. + # If it's an exec, keep it what it was. 
+ setup[pydevd_constants.ARGUMENT_PPID] = os.getpid() + + protocol = pydevd_constants.get_protocol() + if protocol == pydevd_constants.HTTP_JSON_PROTOCOL: + setup[pydevd_constants.ARGUMENT_HTTP_JSON_PROTOCOL] = True + + elif protocol == pydevd_constants.JSON_PROTOCOL: + setup[pydevd_constants.ARGUMENT_JSON_PROTOCOL] = True + + elif protocol == pydevd_constants.QUOTED_LINE_PROTOCOL: + setup[pydevd_constants.ARGUMENT_QUOTED_LINE_PROTOCOL] = True + + elif protocol == pydevd_constants.HTTP_PROTOCOL: + setup[pydevd_constants.ARGUMENT_HTTP_PROTOCOL] = True + + else: + pydev_log.debug('Unexpected protocol: %s', protocol) + return setup + + +class _LastFutureImportFinder(ast.NodeVisitor): + + def __init__(self): + self.last_future_import_found = None + + def visit_ImportFrom(self, node): + if node.module == '__future__': + self.last_future_import_found = node + + +def _get_offset_from_line_col(code, line, col): + offset = 0 + for i, line_contents in enumerate(code.splitlines(True)): + if i == line: + offset += col + return offset + else: + offset += len(line_contents) + + return -1 + + +def _separate_future_imports(code): + ''' + :param code: + The code from where we want to get the __future__ imports (note that it's possible that + there's no such entry). + + :return tuple(str, str): + The return is a tuple(future_import, code). + + If the future import is not available a return such as ('', code) is given, otherwise, the + future import will end with a ';' (so that it can be put right before the pydevd attach + code). + ''' + try: + node = ast.parse(code, '', 'exec') + visitor = _LastFutureImportFinder() + visitor.visit(node) + + if visitor.last_future_import_found is None: + return '', code + + node = visitor.last_future_import_found + offset = -1 + if hasattr(node, 'end_lineno') and hasattr(node, 'end_col_offset'): + # Python 3.8 onwards has these (so, use when possible). + line, col = node.end_lineno, node.end_col_offset + offset = _get_offset_from_line_col(code, line - 1, col) # ast lines are 1-based, make it 0-based. + + else: + # end line/col not available, let's just find the offset and then search + # for the alias from there. + line, col = node.lineno, node.col_offset + offset = _get_offset_from_line_col(code, line - 1, col) # ast lines are 1-based, make it 0-based. + if offset >= 0 and node.names: + from_future_import_name = node.names[-1].name + i = code.find(from_future_import_name, offset) + if i < 0: + offset = -1 + else: + offset = i + len(from_future_import_name) + + if offset >= 0: + for i in range(offset, len(code)): + if code[i] in (' ', '\t', ';', ')', '\n'): + offset += 1 + else: + break + + future_import = code[:offset] + code_remainder = code[offset:] + + # Now, put '\n' lines back into the code remainder (we had to search for + # `\n)`, but in case we just got the `\n`, it should be at the remainder, + # not at the future import. + while future_import.endswith('\n'): + future_import = future_import[:-1] + code_remainder = '\n' + code_remainder + + if not future_import.endswith(';'): + future_import += ';' + return future_import, code_remainder + + # This shouldn't happen... + pydev_log.info('Unable to find line %s in code:\n%r', line, code) + return '', code + + except: + pydev_log.exception('Error getting from __future__ imports from: %r', code) + return '', code + + +def _get_python_c_args(host, port, code, args, setup): + setup = _get_setup_updated_with_protocol_and_ppid(setup) + + # i.e.: We want to make the repr sorted so that it works in tests. 
+ setup_repr = setup if setup is None else (sorted_dict_repr(setup)) + + future_imports = '' + if '__future__' in code: + # If the code has a __future__ import, we need to be able to strip the __future__ + # imports from the code and add them to the start of our code snippet. + future_imports, code = _separate_future_imports(code) + + return ("%simport sys; sys.path.insert(0, r'%s'); import pydevd; pydevd.PydevdCustomization.DEFAULT_PROTOCOL=%r; " + "pydevd.settrace(host=%r, port=%s, suspend=False, trace_only_current_thread=False, patch_multiprocessing=True, access_token=%r, client_access_token=%r, __setup_holder__=%s); " + "%s" + ) % ( + future_imports, + pydev_src_dir, + pydevd_constants.get_protocol(), + host, + port, + setup.get('access-token'), + setup.get('client-access-token'), + setup_repr, + code) + + +def _get_host_port(): + import pydevd + host, port = pydevd.dispatch() + return host, port + + +def _is_managed_arg(arg): + pydevd_py = _get_str_type_compatible(arg, 'pydevd.py') + if arg.endswith(pydevd_py): + return True + return False + + +def _on_forked_process(setup_tracing=True): + pydevd_constants.after_fork() + pydev_log.initialize_debug_stream(reinitialize=True) + + if setup_tracing: + pydev_log.debug('pydevd on forked process: %s', os.getpid()) + + import pydevd + pydevd.threadingCurrentThread().__pydevd_main_thread = True + pydevd.settrace_forked(setup_tracing=setup_tracing) + + +def _on_set_trace_for_new_thread(global_debugger): + if global_debugger is not None: + global_debugger.enable_tracing() + + +def _get_str_type_compatible(s, args): + ''' + This method converts `args` to byte/unicode based on the `s' type. + ''' + if isinstance(args, (list, tuple)): + ret = [] + for arg in args: + if type(s) == type(arg): + ret.append(arg) + else: + if isinstance(s, bytes): + ret.append(arg.encode('utf-8')) + else: + ret.append(arg.decode('utf-8')) + return ret + else: + if type(s) == type(args): + return args + else: + if isinstance(s, bytes): + return args.encode('utf-8') + else: + return args.decode('utf-8') + + +#=============================================================================== +# Things related to monkey-patching +#=============================================================================== +def is_python(path): + single_quote, double_quote = _get_str_type_compatible(path, ["'", '"']) + + if path.endswith(single_quote) or path.endswith(double_quote): + path = path[1:len(path) - 1] + filename = os.path.basename(path).lower() + for name in _get_str_type_compatible(filename, ['python', 'jython', 'pypy']): + if filename.find(name) != -1: + return True + + return False + + +class InvalidTypeInArgsException(Exception): + pass + + +def remove_quotes_from_args(args): + if sys.platform == "win32": + new_args = [] + + for x in args: + if Path is not None and isinstance(x, Path): + x = str(x) + else: + if not isinstance(x, (bytes, str)): + raise InvalidTypeInArgsException(str(type(x))) + + double_quote, two_double_quotes = _get_str_type_compatible(x, ['"', '""']) + + if x != two_double_quotes: + if len(x) > 1 and x.startswith(double_quote) and x.endswith(double_quote): + x = x[1:-1] + + new_args.append(x) + return new_args + else: + new_args = [] + for x in args: + if Path is not None and isinstance(x, Path): + x = x.as_posix() + else: + if not isinstance(x, (bytes, str)): + raise InvalidTypeInArgsException(str(type(x))) + new_args.append(x) + + return new_args + + +def quote_arg_win32(arg): + fix_type = lambda x: _get_str_type_compatible(arg, x) + + # See if we need to 
quote at all - empty strings need quoting, as do strings + # with whitespace or quotes in them. Backslashes do not need quoting. + if arg and not set(arg).intersection(fix_type(' "\t\n\v')): + return arg + + # Per https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw, + # the standard way to interpret arguments in double quotes is as follows: + # + # 2N backslashes followed by a quotation mark produce N backslashes followed by + # begin/end quote. This does not become part of the parsed argument, but toggles + # the "in quotes" mode. + # + # 2N+1 backslashes followed by a quotation mark again produce N backslashes followed + # by a quotation mark literal ("). This does not toggle the "in quotes" mode. + # + # N backslashes not followed by a quotation mark simply produce N backslashes. + # + # This code needs to do the reverse transformation, thus: + # + # N backslashes followed by " produce 2N+1 backslashes followed by " + # + # N backslashes at the end (i.e. where the closing " goes) produce 2N backslashes. + # + # N backslashes in any other position remain as is. + + arg = re.sub(fix_type(r'(\\*)\"'), fix_type(r'\1\1\\"'), arg) + arg = re.sub(fix_type(r'(\\*)$'), fix_type(r'\1\1'), arg) + return fix_type('"') + arg + fix_type('"') + + +def quote_args(args): + if sys.platform == "win32": + return list(map(quote_arg_win32, args)) + else: + return args + + +def patch_args(args, is_exec=False): + ''' + :param list args: + Arguments to patch. + + :param bool is_exec: + If it's an exec, the current process will be replaced (this means we have + to keep the same ppid). + ''' + try: + pydev_log.debug("Patching args: %s", args) + original_args = args + try: + unquoted_args = remove_quotes_from_args(args) + except InvalidTypeInArgsException as e: + pydev_log.info('Unable to monkey-patch subprocess arguments because a type found in the args is invalid: %s', e) + return original_args + + # Internally we should reference original_args (if we want to return them) or unquoted_args + # to add to the list which will be then quoted in the end. + del args + + from pydevd import SetupHolder + if not unquoted_args: + return original_args + + if not is_python(unquoted_args[0]): + pydev_log.debug("Process is not python, returning.") + return original_args + + # Note: we create a copy as string to help with analyzing the arguments, but + # the final list should have items from the unquoted_args as they were initially. + args_as_str = _get_str_type_compatible('', unquoted_args) + + params_with_value_in_separate_arg = ( + '--check-hash-based-pycs', + '--jit' # pypy option + ) + + # All short switches may be combined together. The ones below require a value and the + # value itself may be embedded in the arg. 
+ # + # i.e.: Python accepts things as: + # + # python -OQold -qmtest + # + # Which is the same as: + # + # python -O -Q old -q -m test + # + # or even: + # + # python -OQold "-vcimport sys;print(sys)" + # + # Which is the same as: + # + # python -O -Q old -v -c "import sys;print(sys)" + + params_with_combinable_arg = set(('W', 'X', 'Q', 'c', 'm')) + + module_name = None + before_module_flag = '' + module_name_i_start = -1 + module_name_i_end = -1 + + code = None + code_i = -1 + code_i_end = -1 + code_flag = '' + + filename = None + filename_i = -1 + + ignore_next = True # start ignoring the first (the first entry is the python executable) + for i, arg_as_str in enumerate(args_as_str): + if ignore_next: + ignore_next = False + continue + + if arg_as_str.startswith('-'): + if arg_as_str == '-': + # Contents will be read from the stdin. This is not currently handled. + pydev_log.debug('Unable to fix arguments to attach debugger on subprocess when reading from stdin ("python ... -").') + return original_args + + if arg_as_str.startswith(params_with_value_in_separate_arg): + if arg_as_str in params_with_value_in_separate_arg: + ignore_next = True + continue + + break_out = False + for j, c in enumerate(arg_as_str): + + # i.e.: Python supports -X faulthandler as well as -Xfaulthandler + # (in one case we have to ignore the next and in the other we don't + # have to ignore it). + if c in params_with_combinable_arg: + remainder = arg_as_str[j + 1:] + if not remainder: + ignore_next = True + + if c == 'm': + # i.e.: Something as + # python -qm test + # python -m test + # python -qmtest + before_module_flag = arg_as_str[:j] # before_module_flag would then be "-q" + if before_module_flag == '-': + before_module_flag = '' + module_name_i_start = i + if not remainder: + module_name = unquoted_args[i + 1] + module_name_i_end = i + 1 + else: + # i.e.: python -qmtest should provide 'test' as the module_name + module_name = unquoted_args[i][j + 1:] + module_name_i_end = module_name_i_start + break_out = True + break + + elif c == 'c': + # i.e.: Something as + # python -qc "import sys" + # python -c "import sys" + # python "-qcimport sys" + code_flag = arg_as_str[:j + 1] # code_flag would then be "-qc" + + if not remainder: + # arg_as_str is something as "-qc", "import sys" + code = unquoted_args[i + 1] + code_i_end = i + 2 + else: + # if arg_as_str is something as "-qcimport sys" + code = remainder # code would be "import sys" + code_i_end = i + 1 + code_i = i + break_out = True + break + + else: + break + + if break_out: + break + + else: + # It doesn't start with '-' and we didn't ignore this entry: + # this means that this is the file to be executed. + filename = unquoted_args[i] + + # Note that the filename is not validated here. + # There are cases where even a .exe is valid (xonsh.exe): + # https://github.com/microsoft/debugpy/issues/945 + # So, we should support whatever runpy.run_path + # supports in this case. + + filename_i = i + + if _is_managed_arg(filename): # no need to add pydevd twice + pydev_log.debug('Skipped monkey-patching as pydevd.py is in args already.') + return original_args + + break + else: + # We didn't find the filename (something is unexpected). 
+ pydev_log.debug('Unable to fix arguments to attach debugger on subprocess (filename not found).') + return original_args + + if code_i != -1: + host, port = _get_host_port() + + if port is not None: + new_args = [] + new_args.extend(unquoted_args[:code_i]) + new_args.append(code_flag) + new_args.append(_get_python_c_args(host, port, code, unquoted_args, SetupHolder.setup)) + new_args.extend(unquoted_args[code_i_end:]) + + return quote_args(new_args) + + first_non_vm_index = max(filename_i, module_name_i_start) + if first_non_vm_index == -1: + pydev_log.debug('Unable to fix arguments to attach debugger on subprocess (could not resolve filename nor module name).') + return original_args + + # Original args should be something as: + # ['X:\\pysrc\\pydevd.py', '--multiprocess', '--print-in-debugger-startup', + # '--vm_type', 'python', '--client', '127.0.0.1', '--port', '56352', '--file', 'x:\\snippet1.py'] + from _pydevd_bundle.pydevd_command_line_handling import setup_to_argv + new_args = [] + new_args.extend(unquoted_args[:first_non_vm_index]) + if before_module_flag: + new_args.append(before_module_flag) + + add_module_at = len(new_args) + 1 + + new_args.extend(setup_to_argv( + _get_setup_updated_with_protocol_and_ppid(SetupHolder.setup, is_exec=is_exec), + skip_names=set(('module', 'cmd-line')) + )) + new_args.append('--file') + + if module_name is not None: + assert module_name_i_start != -1 + assert module_name_i_end != -1 + # Always after 'pydevd' (i.e.: pydevd "--module" --multiprocess ...) + new_args.insert(add_module_at, '--module') + new_args.append(module_name) + new_args.extend(unquoted_args[module_name_i_end + 1:]) + + elif filename is not None: + assert filename_i != -1 + new_args.append(filename) + new_args.extend(unquoted_args[filename_i + 1:]) + + else: + raise AssertionError('Internal error (unexpected condition)') + + return quote_args(new_args) + except: + pydev_log.exception('Error patching args (debugger not attached to subprocess).') + return original_args + + +def str_to_args_windows(args): + # See https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments. + # + # Implemetation ported from DebugPlugin.parseArgumentsWindows: + # https://github.com/eclipse/eclipse.platform.debug/blob/master/org.eclipse.debug.core/core/org/eclipse/debug/core/DebugPlugin.java + + result = [] + + DEFAULT = 0 + ARG = 1 + IN_DOUBLE_QUOTE = 2 + + state = DEFAULT + backslashes = 0 + buf = '' + + args_len = len(args) + for i in range(args_len): + ch = args[i] + if (ch == '\\'): + backslashes += 1 + continue + elif (backslashes != 0): + if ch == '"': + while backslashes >= 2: + backslashes -= 2 + buf += '\\' + if (backslashes == 1): + if (state == DEFAULT): + state = ARG + + buf += '"' + backslashes = 0 + continue + # else fall through to switch + else: + # false alarm, treat passed backslashes literally... + if (state == DEFAULT): + state = ARG + + while backslashes > 0: + backslashes -= 1 + buf += '\\' + # fall through to switch + if ch in (' ', '\t'): + if (state == DEFAULT): + # skip + continue + elif (state == ARG): + state = DEFAULT + result.append(buf) + buf = '' + continue + + if state in (DEFAULT, ARG): + if ch == '"': + state = IN_DOUBLE_QUOTE + else: + state = ARG + buf += ch + + elif state == IN_DOUBLE_QUOTE: + if ch == '"': + if (i + 1 < args_len and args[i + 1] == '"'): + # Undocumented feature in Windows: + # Two consecutive double quotes inside a double-quoted argument are interpreted as + # a single double quote. 
+ buf += '"' + i += 1 + else: + state = ARG + else: + buf += ch + + else: + raise RuntimeError('Illegal condition') + + if len(buf) > 0 or state != DEFAULT: + result.append(buf) + + return result + + +def patch_arg_str_win(arg_str): + args = str_to_args_windows(arg_str) + # Fix https://youtrack.jetbrains.com/issue/PY-9767 (args may be empty) + if not args or not is_python(args[0]): + return arg_str + arg_str = ' '.join(patch_args(args)) + pydev_log.debug("New args: %s", arg_str) + return arg_str + + +def monkey_patch_module(module, funcname, create_func): + if hasattr(module, funcname): + original_name = 'original_' + funcname + if not hasattr(module, original_name): + setattr(module, original_name, getattr(module, funcname)) + setattr(module, funcname, create_func(original_name)) + + +def monkey_patch_os(funcname, create_func): + monkey_patch_module(os, funcname, create_func) + + +def warn_multiproc(): + pass # TODO: Provide logging as messages to the IDE. + # pydev_log.error_once( + # "pydev debugger: New process is launching (breakpoints won't work in the new process).\n" + # "pydev debugger: To debug that process please enable 'Attach to subprocess automatically while debugging?' option in the debugger settings.\n") + # + + +def create_warn_multiproc(original_name): + + def new_warn_multiproc(*args, **kwargs): + import os + + warn_multiproc() + + return getattr(os, original_name)(*args, **kwargs) + + return new_warn_multiproc + + +def create_execl(original_name): + + def new_execl(path, *args): + """ + os.execl(path, arg0, arg1, ...) + os.execle(path, arg0, arg1, ..., env) + os.execlp(file, arg0, arg1, ...) + os.execlpe(file, arg0, arg1, ..., env) + """ + if _get_apply_arg_patching(): + args = patch_args(args, is_exec=True) + send_process_created_message() + send_process_about_to_be_replaced() + + return getattr(os, original_name)(path, *args) + + return new_execl + + +def create_execv(original_name): + + def new_execv(path, args): + """ + os.execv(path, args) + os.execvp(file, args) + """ + if _get_apply_arg_patching(): + args = patch_args(args, is_exec=True) + send_process_created_message() + send_process_about_to_be_replaced() + + return getattr(os, original_name)(path, args) + + return new_execv + + +def create_execve(original_name): + """ + os.execve(path, args, env) + os.execvpe(file, args, env) + """ + + def new_execve(path, args, env): + if _get_apply_arg_patching(): + args = patch_args(args, is_exec=True) + send_process_created_message() + send_process_about_to_be_replaced() + + return getattr(os, original_name)(path, args, env) + + return new_execve + + +def create_spawnl(original_name): + + def new_spawnl(mode, path, *args): + """ + os.spawnl(mode, path, arg0, arg1, ...) + os.spawnlp(mode, file, arg0, arg1, ...) 
+ """ + if _get_apply_arg_patching(): + args = patch_args(args) + send_process_created_message() + + return getattr(os, original_name)(mode, path, *args) + + return new_spawnl + + +def create_spawnv(original_name): + + def new_spawnv(mode, path, args): + """ + os.spawnv(mode, path, args) + os.spawnvp(mode, file, args) + """ + if _get_apply_arg_patching(): + args = patch_args(args) + send_process_created_message() + + return getattr(os, original_name)(mode, path, args) + + return new_spawnv + + +def create_spawnve(original_name): + """ + os.spawnve(mode, path, args, env) + os.spawnvpe(mode, file, args, env) + """ + + def new_spawnve(mode, path, args, env): + if _get_apply_arg_patching(): + args = patch_args(args) + send_process_created_message() + + return getattr(os, original_name)(mode, path, args, env) + + return new_spawnve + + +def create_posix_spawn(original_name): + """ + os.posix_spawn(executable, args, env, **kwargs) + """ + + def new_posix_spawn(executable, args, env, **kwargs): + if _get_apply_arg_patching(): + args = patch_args(args) + send_process_created_message() + + return getattr(os, original_name)(executable, args, env, **kwargs) + + return new_posix_spawn + + +def create_fork_exec(original_name): + """ + _posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more)) + """ + + def new_fork_exec(args, *other_args): + import _posixsubprocess # @UnresolvedImport + if _get_apply_arg_patching(): + args = patch_args(args) + send_process_created_message() + + return getattr(_posixsubprocess, original_name)(args, *other_args) + + return new_fork_exec + + +def create_warn_fork_exec(original_name): + """ + _posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more)) + """ + + def new_warn_fork_exec(*args): + try: + import _posixsubprocess + warn_multiproc() + return getattr(_posixsubprocess, original_name)(*args) + except: + pass + + return new_warn_fork_exec + + +def create_CreateProcess(original_name): + """ + CreateProcess(*args, **kwargs) + """ + + def new_CreateProcess(app_name, cmd_line, *args): + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + + if _get_apply_arg_patching(): + cmd_line = patch_arg_str_win(cmd_line) + send_process_created_message() + + return getattr(_subprocess, original_name)(app_name, cmd_line, *args) + + return new_CreateProcess + + +def create_CreateProcessWarnMultiproc(original_name): + """ + CreateProcess(*args, **kwargs) + """ + + def new_CreateProcess(*args): + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + warn_multiproc() + return getattr(_subprocess, original_name)(*args) + + return new_CreateProcess + + +def create_fork(original_name): + + def new_fork(): + # A simple fork will result in a new python process + is_new_python_process = True + frame = sys._getframe() + + apply_arg_patch = _get_apply_arg_patching() + + is_subprocess_fork = False + while frame is not None: + if frame.f_code.co_name == '_execute_child' and 'subprocess' in frame.f_code.co_filename: + is_subprocess_fork = True + # If we're actually in subprocess.Popen creating a child, it may + # result in something which is not a Python process, (so, we + # don't want to connect with it in the forked version). + executable = frame.f_locals.get('executable') + if executable is not None: + is_new_python_process = False + if is_python(executable): + is_new_python_process = True + break + + frame = frame.f_back + frame = None # Just make sure we don't hold on to it. 
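+ # The real fork happens below: in the child (return value 0) the debugger is
+ # re-initialized when the new process is still a Python process, with tracing set up
+ # only if arg patching applies and this is not an internal subprocess.Popen fork;
+ # the parent only notifies that a new process was created.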
+ + protocol = pydevd_constants.get_protocol() + + child_process = getattr(os, original_name)() # fork + if not child_process: + if is_new_python_process: + PydevdCustomization.DEFAULT_PROTOCOL = protocol + _on_forked_process(setup_tracing=apply_arg_patch and not is_subprocess_fork) + else: + if is_new_python_process: + send_process_created_message() + return child_process + + return new_fork + + +def send_process_created_message(): + py_db = get_global_debugger() + if py_db is not None: + py_db.send_process_created_message() + + +def send_process_about_to_be_replaced(): + py_db = get_global_debugger() + if py_db is not None: + py_db.send_process_about_to_be_replaced() + + +def patch_new_process_functions(): + # os.execl(path, arg0, arg1, ...) + # os.execle(path, arg0, arg1, ..., env) + # os.execlp(file, arg0, arg1, ...) + # os.execlpe(file, arg0, arg1, ..., env) + # os.execv(path, args) + # os.execve(path, args, env) + # os.execvp(file, args) + # os.execvpe(file, args, env) + monkey_patch_os('execl', create_execl) + monkey_patch_os('execle', create_execl) + monkey_patch_os('execlp', create_execl) + monkey_patch_os('execlpe', create_execl) + monkey_patch_os('execv', create_execv) + monkey_patch_os('execve', create_execve) + monkey_patch_os('execvp', create_execv) + monkey_patch_os('execvpe', create_execve) + + # os.spawnl(mode, path, ...) + # os.spawnle(mode, path, ..., env) + # os.spawnlp(mode, file, ...) + # os.spawnlpe(mode, file, ..., env) + # os.spawnv(mode, path, args) + # os.spawnve(mode, path, args, env) + # os.spawnvp(mode, file, args) + # os.spawnvpe(mode, file, args, env) + + monkey_patch_os('spawnl', create_spawnl) + monkey_patch_os('spawnle', create_spawnl) + monkey_patch_os('spawnlp', create_spawnl) + monkey_patch_os('spawnlpe', create_spawnl) + monkey_patch_os('spawnv', create_spawnv) + monkey_patch_os('spawnve', create_spawnve) + monkey_patch_os('spawnvp', create_spawnv) + monkey_patch_os('spawnvpe', create_spawnve) + monkey_patch_os('posix_spawn', create_posix_spawn) + + if not IS_JYTHON: + if not IS_WINDOWS: + monkey_patch_os('fork', create_fork) + try: + import _posixsubprocess + monkey_patch_module(_posixsubprocess, 'fork_exec', create_fork_exec) + except ImportError: + pass + else: + # Windows + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + monkey_patch_module(_subprocess, 'CreateProcess', create_CreateProcess) + + +def patch_new_process_functions_with_warning(): + monkey_patch_os('execl', create_warn_multiproc) + monkey_patch_os('execle', create_warn_multiproc) + monkey_patch_os('execlp', create_warn_multiproc) + monkey_patch_os('execlpe', create_warn_multiproc) + monkey_patch_os('execv', create_warn_multiproc) + monkey_patch_os('execve', create_warn_multiproc) + monkey_patch_os('execvp', create_warn_multiproc) + monkey_patch_os('execvpe', create_warn_multiproc) + monkey_patch_os('spawnl', create_warn_multiproc) + monkey_patch_os('spawnle', create_warn_multiproc) + monkey_patch_os('spawnlp', create_warn_multiproc) + monkey_patch_os('spawnlpe', create_warn_multiproc) + monkey_patch_os('spawnv', create_warn_multiproc) + monkey_patch_os('spawnve', create_warn_multiproc) + monkey_patch_os('spawnvp', create_warn_multiproc) + monkey_patch_os('spawnvpe', create_warn_multiproc) + monkey_patch_os('posix_spawn', create_warn_multiproc) + + if not IS_JYTHON: + if not IS_WINDOWS: + monkey_patch_os('fork', create_warn_multiproc) + try: + import _posixsubprocess + monkey_patch_module(_posixsubprocess, 'fork_exec', create_warn_fork_exec) + except 
ImportError: + pass + else: + # Windows + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + monkey_patch_module(_subprocess, 'CreateProcess', create_CreateProcessWarnMultiproc) + + +class _NewThreadStartupWithTrace: + + def __init__(self, original_func, args, kwargs): + self.original_func = original_func + self.args = args + self.kwargs = kwargs + + def __call__(self): + # We monkey-patch the thread creation so that this function is called in the new thread. At this point + # we notify of its creation and start tracing it. + py_db = get_global_debugger() + + thread_id = None + if py_db is not None: + # Note: if this is a thread from threading.py, we're too early in the boostrap process (because we mocked + # the start_new_thread internal machinery and thread._bootstrap has not finished), so, the code below needs + # to make sure that we use the current thread bound to the original function and not use + # threading.current_thread() unless we're sure it's a dummy thread. + t = getattr(self.original_func, '__self__', getattr(self.original_func, 'im_self', None)) + if not isinstance(t, threading.Thread): + # This is not a threading.Thread but a Dummy thread (so, get it as a dummy thread using + # currentThread). + t = threading.current_thread() + + if not getattr(t, 'is_pydev_daemon_thread', False): + thread_id = get_current_thread_id(t) + py_db.notify_thread_created(thread_id, t) + _on_set_trace_for_new_thread(py_db) + + if getattr(py_db, 'thread_analyser', None) is not None: + try: + from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_concurrency_logger import log_new_thread + log_new_thread(py_db, t) + except: + sys.stderr.write("Failed to detect new thread for visualization") + try: + ret = self.original_func(*self.args, **self.kwargs) + finally: + if thread_id is not None: + if py_db is not None: + # At thread shutdown we only have pydevd-related code running (which shouldn't + # be tracked). + py_db.disable_tracing() + py_db.notify_thread_not_alive(thread_id) + + return ret + + +class _NewThreadStartupWithoutTrace: + + def __init__(self, original_func, args, kwargs): + self.original_func = original_func + self.args = args + self.kwargs = kwargs + + def __call__(self): + return self.original_func(*self.args, **self.kwargs) + + +_UseNewThreadStartup = _NewThreadStartupWithTrace + + +def _get_threading_modules_to_patch(): + threading_modules_to_patch = [] + + try: + import thread as _thread + except: + import _thread + threading_modules_to_patch.append(_thread) + threading_modules_to_patch.append(threading) + + return threading_modules_to_patch + + +threading_modules_to_patch = _get_threading_modules_to_patch() + + +def patch_thread_module(thread_module): + + if getattr(thread_module, '_original_start_new_thread', None) is None: + if thread_module is threading: + if not hasattr(thread_module, '_start_new_thread'): + return # Jython doesn't have it. + _original_start_new_thread = thread_module._original_start_new_thread = thread_module._start_new_thread + else: + _original_start_new_thread = thread_module._original_start_new_thread = thread_module.start_new_thread + else: + _original_start_new_thread = thread_module._original_start_new_thread + + class ClassWithPydevStartNewThread: + + def pydev_start_new_thread(self, function, args=(), kwargs={}): + ''' + We need to replace the original thread_module.start_new_thread with this function so that threads started + through it and not through the threading module are properly traced. 
+ ''' + return _original_start_new_thread(_UseNewThreadStartup(function, args, kwargs), ()) + + # This is a hack for the situation where the thread_module.start_new_thread is declared inside a class, such as the one below + # class F(object): + # start_new_thread = thread_module.start_new_thread + # + # def start_it(self): + # self.start_new_thread(self.function, args, kwargs) + # So, if it's an already bound method, calling self.start_new_thread won't really receive a different 'self' -- it + # does work in the default case because in builtins self isn't passed either. + pydev_start_new_thread = ClassWithPydevStartNewThread().pydev_start_new_thread + + try: + # We need to replace the original thread_module.start_new_thread with this function so that threads started through + # it and not through the threading module are properly traced. + if thread_module is threading: + thread_module._start_new_thread = pydev_start_new_thread + else: + thread_module.start_new_thread = pydev_start_new_thread + thread_module.start_new = pydev_start_new_thread + except: + pass + + +def patch_thread_modules(): + for t in threading_modules_to_patch: + patch_thread_module(t) + + +def undo_patch_thread_modules(): + for t in threading_modules_to_patch: + try: + t.start_new_thread = t._original_start_new_thread + except: + pass + + try: + t.start_new = t._original_start_new_thread + except: + pass + + try: + t._start_new_thread = t._original_start_new_thread + except: + pass + + +def disable_trace_thread_modules(): + ''' + Can be used to temporarily stop tracing threads created with thread.start_new_thread. + ''' + global _UseNewThreadStartup + _UseNewThreadStartup = _NewThreadStartupWithoutTrace + + +def enable_trace_thread_modules(): + ''' + Can be used to start tracing threads created with thread.start_new_thread again. + ''' + global _UseNewThreadStartup + _UseNewThreadStartup = _NewThreadStartupWithTrace + + +def get_original_start_new_thread(threading_module): + try: + return threading_module._original_start_new_thread + except: + return threading_module.start_new_thread diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey_qt.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey_qt.py new file mode 100644 index 0000000..e348b84 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey_qt.py @@ -0,0 +1,216 @@ +from __future__ import nested_scopes + +from _pydev_bundle._pydev_saved_modules import threading +import os +from _pydev_bundle import pydev_log + + +def set_trace_in_qt(): + from _pydevd_bundle.pydevd_comm import get_global_debugger + py_db = get_global_debugger() + if py_db is not None: + threading.current_thread() # Create the dummy thread for qt. + py_db.enable_tracing() + + +_patched_qt = False + + +def patch_qt(qt_support_mode): + ''' + This method patches qt (PySide2, PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread. 
+ ''' + if not qt_support_mode: + return + + if qt_support_mode is True or qt_support_mode == 'True': + # do not break backward compatibility + qt_support_mode = 'auto' + + if qt_support_mode == 'auto': + qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto') + + # Avoid patching more than once + global _patched_qt + if _patched_qt: + return + + pydev_log.debug('Qt support mode: %s', qt_support_mode) + + _patched_qt = True + + if qt_support_mode == 'auto': + + patch_qt_on_import = None + try: + import PySide2 # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyside2' + except: + try: + import Pyside # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyside' + except: + try: + import PyQt5 # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyqt5' + except: + try: + import PyQt4 # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyqt4' + except: + return + + if qt_support_mode == 'pyside2': + try: + import PySide2.QtCore # @UnresolvedImport + _internal_patch_qt(PySide2.QtCore, qt_support_mode) + except: + return + + elif qt_support_mode == 'pyside': + try: + import PySide.QtCore # @UnresolvedImport + _internal_patch_qt(PySide.QtCore, qt_support_mode) + except: + return + + elif qt_support_mode == 'pyqt5': + try: + import PyQt5.QtCore # @UnresolvedImport + _internal_patch_qt(PyQt5.QtCore) + except: + return + + elif qt_support_mode == 'pyqt4': + # Ok, we have an issue here: + # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode + # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html + # Mostly, if the user uses a different API version (i.e.: v2 instead of v1), + # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue + # as they only implements v2). + patch_qt_on_import = 'PyQt4' + + def get_qt_core_module(): + import PyQt4.QtCore # @UnresolvedImport + return PyQt4.QtCore + + _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module) + + else: + raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,)) + + +def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module): + # I don't like this approach very much as we have to patch __import__, but I like even less + # asking the user to configure something in the client side... + # So, our approach is to patch PyQt4 right before the user tries to import it (at which + # point he should've set the sip api version properly already anyways). + + pydev_log.debug('Setting up Qt post-import monkeypatch.') + + dotted = patch_qt_on_import + '.' 
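+ # Keep a reference to the builtin __import__ so the hook below can restore it
+ # (and undo the sys/reload patches) once the Qt package is actually imported.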
+ original_import = __import__ + + from _pydev_bundle._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module + + patch_sys_module() + patch_reload() + + def patched_import(name, *args, **kwargs): + if patch_qt_on_import == name or name.startswith(dotted): + builtins.__import__ = original_import + cancel_patches_in_sys_module() + _internal_patch_qt(get_qt_core_module()) # Patch it only when the user would import the qt module + return original_import(name, *args, **kwargs) + + import builtins # Py3 + + builtins.__import__ = patched_import + + +def _internal_patch_qt(QtCore, qt_support_mode='auto'): + pydev_log.debug('Patching Qt: %s', QtCore) + + _original_thread_init = QtCore.QThread.__init__ + _original_runnable_init = QtCore.QRunnable.__init__ + _original_QThread = QtCore.QThread + + class FuncWrapper: + + def __init__(self, original): + self._original = original + + def __call__(self, *args, **kwargs): + set_trace_in_qt() + return self._original(*args, **kwargs) + + class StartedSignalWrapper(QtCore.QObject): # Wrapper for the QThread.started signal + + try: + _signal = QtCore.Signal() # @UndefinedVariable + except: + _signal = QtCore.pyqtSignal() # @UndefinedVariable + + def __init__(self, thread, original_started): + QtCore.QObject.__init__(self) + self.thread = thread + self.original_started = original_started + if qt_support_mode in ('pyside', 'pyside2'): + self._signal = original_started + else: + self._signal.connect(self._on_call) + self.original_started.connect(self._signal) + + def connect(self, func, *args, **kwargs): + if qt_support_mode in ('pyside', 'pyside2'): + return self._signal.connect(FuncWrapper(func), *args, **kwargs) + else: + return self._signal.connect(func, *args, **kwargs) + + def disconnect(self, *args, **kwargs): + return self._signal.disconnect(*args, **kwargs) + + def emit(self, *args, **kwargs): + return self._signal.emit(*args, **kwargs) + + def _on_call(self, *args, **kwargs): + set_trace_in_qt() + + class ThreadWrapper(QtCore.QThread): # Wrapper for QThread + + def __init__(self, *args, **kwargs): + _original_thread_init(self, *args, **kwargs) + + # In PyQt5 the program hangs when we try to call original run method of QThread class. + # So we need to distinguish instances of QThread class and instances of QThread inheritors. 
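+ # If run was not overridden, replace it with _exec_run (which sets tracing and
+ # calls exec_()); otherwise wrap the user's run so tracing is set before it runs.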
+ if self.__class__.run == _original_QThread.run: + self.run = self._exec_run + else: + self._original_run = self.run + self.run = self._new_run + self._original_started = self.started + self.started = StartedSignalWrapper(self, self.started) + + def _exec_run(self): + set_trace_in_qt() + self.exec_() + return None + + def _new_run(self): + set_trace_in_qt() + return self._original_run() + + class RunnableWrapper(QtCore.QRunnable): # Wrapper for QRunnable + + def __init__(self, *args, **kwargs): + _original_runnable_init(self, *args, **kwargs) + + self._original_run = self.run + self.run = self._new_run + + def _new_run(self): + set_trace_in_qt() + return self._original_run() + + QtCore.QThread = ThreadWrapper + QtCore.QRunnable = RunnableWrapper diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_override.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_override.py new file mode 120000 index 0000000..08eec60 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_override.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/94/bd/ed/1929d0b338b3b5bc9e3f2ae4f5ebbec66800594d98430c6dbf6b77b68e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_umd.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_umd.py new file mode 100644 index 0000000..134ce4c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_umd.py @@ -0,0 +1,180 @@ +""" +The UserModuleDeleter and runfile methods are copied from +Spyder and carry their own license agreement. +http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py + +Spyder License Agreement (MIT License) +-------------------------------------- + +Copyright (c) 2009-2012 Pierre Raybaut + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+""" + +import sys +import os +from _pydev_bundle._pydev_execfile import execfile + + +# The following classes and functions are mainly intended to be used from +# an interactive Python session +class UserModuleDeleter: + """ + User Module Deleter (UMD) aims at deleting user modules + to force Python to deeply reload them during import + + pathlist [list]: ignore list in terms of module path + namelist [list]: ignore list in terms of module name + """ + + def __init__(self, namelist=None, pathlist=None): + if namelist is None: + namelist = [] + self.namelist = namelist + if pathlist is None: + pathlist = [] + self.pathlist = pathlist + try: + # ignore all files in org.python.pydev/pysrc + import pydev_pysrc, inspect + self.pathlist.append(os.path.dirname(pydev_pysrc.__file__)) + except: + pass + self.previous_modules = list(sys.modules.keys()) + + def is_module_ignored(self, modname, modpath): + for path in [sys.prefix] + self.pathlist: + if modpath.startswith(path): + return True + else: + return set(modname.split('.')) & set(self.namelist) + + def run(self, verbose=False): + """ + Del user modules to force Python to deeply reload them + + Do not del modules which are considered as system modules, i.e. + modules installed in subdirectories of Python interpreter's binary + Do not del C modules + """ + log = [] + modules_copy = dict(sys.modules) + for modname, module in modules_copy.items(): + if modname == 'aaaaa': + print(modname, module) + print(self.previous_modules) + if modname not in self.previous_modules: + modpath = getattr(module, '__file__', None) + if modpath is None: + # *module* is a C module that is statically linked into the + # interpreter. There is no way to know its path, so we + # choose to ignore it. + continue + if not self.is_module_ignored(modname, modpath): + log.append(modname) + del sys.modules[modname] + if verbose and log: + print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted", + ": " + ", ".join(log))) + + +__umd__ = None + +_get_globals_callback = None + + +def _set_globals_function(get_globals): + global _get_globals_callback + _get_globals_callback = get_globals + + +def _get_globals(): + """Return current Python interpreter globals namespace""" + if _get_globals_callback is not None: + return _get_globals_callback() + else: + try: + from __main__ import __dict__ as namespace + except ImportError: + try: + # The import fails on IronPython + import __main__ + namespace = __main__.__dict__ + except: + namespace + shell = namespace.get('__ipythonshell__') + if shell is not None and hasattr(shell, 'user_ns'): + # IPython 0.12+ kernel + return shell.user_ns + else: + # Python interpreter + return namespace + return namespace + + +def runfile(filename, args=None, wdir=None, namespace=None): + """ + Run filename + args: command line arguments (string) + wdir: working directory + """ + try: + if hasattr(filename, 'decode'): + filename = filename.decode('utf-8') + except (UnicodeError, TypeError): + pass + global __umd__ + if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true": + if __umd__ is None: + namelist = os.environ.get("PYDEV_UMD_NAMELIST", None) + if namelist is not None: + namelist = namelist.split(',') + __umd__ = UserModuleDeleter(namelist=namelist) + else: + verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true" + __umd__.run(verbose=verbose) + if args is not None and not isinstance(args, (bytes, str)): + raise TypeError("expected a character buffer object") + if namespace is None: + namespace = _get_globals() + if '__file__' in 
namespace: + old_file = namespace['__file__'] + else: + old_file = None + namespace['__file__'] = filename + sys.argv = [filename] + if args is not None: + for arg in args.split(): + sys.argv.append(arg) + if wdir is not None: + try: + if hasattr(wdir, 'decode'): + wdir = wdir.decode('utf-8') + except (UnicodeError, TypeError): + pass + os.chdir(wdir) + execfile(filename, namespace) + sys.argv = [''] + if old_file is None: + del namespace['__file__'] + else: + namespace['__file__'] = old_file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_versioncheck.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_versioncheck.py new file mode 120000 index 0000000..7a702e5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_bundle/pydev_versioncheck.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/56/91/a9/d5265e0cc3e29adfbee5dabb2c37c1ea62b23c645c02c4140b0b851eec \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d2be3b5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles.cpython-38.pyc new file mode 100644 index 0000000..c2c698b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_coverage.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_coverage.cpython-38.pyc new file mode 100644 index 0000000..f4a05f1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_coverage.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_nose.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_nose.cpython-38.pyc new file mode 100644 index 0000000..2fdfcbb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_nose.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel.cpython-38.pyc new file mode 
100644 index 0000000..395cac7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel_client.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel_client.cpython-38.pyc new file mode 100644 index 0000000..75743f5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_parallel_client.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_pytest2.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_pytest2.cpython-38.pyc new file mode 100644 index 0000000..17517ad Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_pytest2.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_unittest.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_unittest.cpython-38.pyc new file mode 100644 index 0000000..9e64fe0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_unittest.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_xml_rpc.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_xml_rpc.cpython-38.pyc new file mode 100644 index 0000000..6b8a651 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/__pycache__/pydev_runfiles_xml_rpc.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles.py new file mode 100644 index 0000000..9c199e1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles.py @@ -0,0 +1,857 @@ +from __future__ import nested_scopes + +import fnmatch +import os.path +from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support +from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport +import re +import time + + +#======================================================================================================================= +# Configuration +#======================================================================================================================= +class Configuration: + + def __init__( + self, + files_or_dirs='', + verbosity=2, + include_tests=None, + tests=None, + port=None, + files_to_tests=None, + jobs=1, + split_jobs='tests', + coverage_output_dir=None, + coverage_include=None, + coverage_output_file=None, + exclude_files=None, + exclude_tests=None, + include_files=None, + django=False, + ): + self.files_or_dirs = files_or_dirs + self.verbosity = verbosity + self.include_tests = include_tests + self.tests = tests + self.port = port + self.files_to_tests = files_to_tests + self.jobs = jobs + self.split_jobs = split_jobs + 
self.django = django + + if include_tests: + assert isinstance(include_tests, (list, tuple)) + + if exclude_files: + assert isinstance(exclude_files, (list, tuple)) + + if exclude_tests: + assert isinstance(exclude_tests, (list, tuple)) + + self.exclude_files = exclude_files + self.include_files = include_files + self.exclude_tests = exclude_tests + + self.coverage_output_dir = coverage_output_dir + self.coverage_include = coverage_include + self.coverage_output_file = coverage_output_file + + def __str__(self): + return '''Configuration + - files_or_dirs: %s + - verbosity: %s + - tests: %s + - port: %s + - files_to_tests: %s + - jobs: %s + - split_jobs: %s + + - include_files: %s + - include_tests: %s + + - exclude_files: %s + - exclude_tests: %s + + - coverage_output_dir: %s + - coverage_include_dir: %s + - coverage_output_file: %s + + - django: %s +''' % ( + self.files_or_dirs, + self.verbosity, + self.tests, + self.port, + self.files_to_tests, + self.jobs, + self.split_jobs, + + self.include_files, + self.include_tests, + + self.exclude_files, + self.exclude_tests, + + self.coverage_output_dir, + self.coverage_include, + self.coverage_output_file, + + self.django, + ) + + +#======================================================================================================================= +# parse_cmdline +#======================================================================================================================= +def parse_cmdline(argv=None): + """ + Parses command line and returns test directories, verbosity, test filter and test suites + + usage: + runfiles.py -v|--verbosity -t|--tests dirs|files + + Multiprocessing options: + jobs=number (with the number of jobs to be used to run the tests) + split_jobs='module'|'tests' + if == module, a given job will always receive all the tests from a module + if == tests, the tests will be split independently of their originating module (default) + + --exclude_files = comma-separated list of patterns with files to exclude (fnmatch style) + --include_files = comma-separated list of patterns with files to include (fnmatch style) + --exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style) + + Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored! + """ + if argv is None: + argv = sys.argv + + verbosity = 2 + include_tests = None + tests = None + port = None + jobs = 1 + split_jobs = 'tests' + files_to_tests = {} + coverage_output_dir = None + coverage_include = None + exclude_files = None + exclude_tests = None + include_files = None + django = False + + from _pydev_bundle._pydev_getopt import gnu_getopt + optlist, dirs = gnu_getopt( + argv[1:], "", + [ + "verbosity=", + "tests=", + + "port=", + "config_file=", + + "jobs=", + "split_jobs=", + + "include_tests=", + "include_files=", + + "exclude_files=", + "exclude_tests=", + + "coverage_output_dir=", + "coverage_include=", + + "django=" + ] + ) + + for opt, value in optlist: + if opt in ("-v", "--verbosity"): + verbosity = value + + elif opt in ("-p", "--port"): + port = int(value) + + elif opt in ("-j", "--jobs"): + jobs = int(value) + + elif opt in ("-s", "--split_jobs"): + split_jobs = value + if split_jobs not in ('module', 'tests'): + raise AssertionError('Expected split to be either "module" or "tests". 
Was :%s' % (split_jobs,)) + + elif opt in ("-d", "--coverage_output_dir",): + coverage_output_dir = value.strip() + + elif opt in ("-i", "--coverage_include",): + coverage_include = value.strip() + + elif opt in ("-I", "--include_tests"): + include_tests = value.split(',') + + elif opt in ("-E", "--exclude_files"): + exclude_files = value.split(',') + + elif opt in ("-F", "--include_files"): + include_files = value.split(',') + + elif opt in ("-e", "--exclude_tests"): + exclude_tests = value.split(',') + + elif opt in ("-t", "--tests"): + tests = value.split(',') + + elif opt in ("--django",): + django = value.strip() in ['true', 'True', '1'] + + elif opt in ("-c", "--config_file"): + config_file = value.strip() + if os.path.exists(config_file): + f = open(config_file, 'r') + try: + config_file_contents = f.read() + finally: + f.close() + + if config_file_contents: + config_file_contents = config_file_contents.strip() + + if config_file_contents: + for line in config_file_contents.splitlines(): + file_and_test = line.split('|') + if len(file_and_test) == 2: + file, test = file_and_test + if file in files_to_tests: + files_to_tests[file].append(test) + else: + files_to_tests[file] = [test] + + else: + sys.stderr.write('Could not find config file: %s\n' % (config_file,)) + + if type([]) != type(dirs): + dirs = [dirs] + + ret_dirs = [] + for d in dirs: + if '|' in d: + # paths may come from the ide separated by | + ret_dirs.extend(d.split('|')) + else: + ret_dirs.append(d) + + verbosity = int(verbosity) + + if tests: + if verbosity > 4: + sys.stdout.write('--tests provided. Ignoring --exclude_files, --exclude_tests and --include_files\n') + exclude_files = exclude_tests = include_files = None + + config = Configuration( + ret_dirs, + verbosity, + include_tests, + tests, + port, + files_to_tests, + jobs, + split_jobs, + coverage_output_dir, + coverage_include, + exclude_files=exclude_files, + exclude_tests=exclude_tests, + include_files=include_files, + django=django, + ) + + if verbosity > 5: + sys.stdout.write(str(config) + '\n') + return config + + +#======================================================================================================================= +# PydevTestRunner +#======================================================================================================================= +class PydevTestRunner(object): + """ finds and runs a file or directory of files as a unit test """ + + __py_extensions = ["*.py", "*.pyw"] + __exclude_files = ["__init__.*"] + + # Just to check that only this attributes will be written to this file + __slots__ = [ + 'verbosity', # Always used + + 'files_to_tests', # If this one is given, the ones below are not used + + 'files_or_dirs', # Files or directories received in the command line + 'include_tests', # The filter used to collect the tests + 'tests', # Strings with the tests to be run + + 'jobs', # Integer with the number of jobs that should be used to run the test cases + 'split_jobs', # String with 'tests' or 'module' (how should the jobs be split) + + 'configuration', + 'coverage', + ] + + def __init__(self, configuration): + self.verbosity = configuration.verbosity + + self.jobs = configuration.jobs + self.split_jobs = configuration.split_jobs + + files_to_tests = configuration.files_to_tests + if files_to_tests: + self.files_to_tests = files_to_tests + self.files_or_dirs = list(files_to_tests.keys()) + self.tests = None + else: + self.files_to_tests = {} + self.files_or_dirs = configuration.files_or_dirs + self.tests = 
configuration.tests + + self.configuration = configuration + self.__adjust_path() + + def __adjust_path(self): + """ add the current file or directory to the python path """ + path_to_append = None + for n in range(len(self.files_or_dirs)): + dir_name = self.__unixify(self.files_or_dirs[n]) + if os.path.isdir(dir_name): + if not dir_name.endswith("/"): + self.files_or_dirs[n] = dir_name + "/" + path_to_append = os.path.normpath(dir_name) + elif os.path.isfile(dir_name): + path_to_append = os.path.dirname(dir_name) + else: + if not os.path.exists(dir_name): + block_line = '*' * 120 + sys.stderr.write('\n%s\n* PyDev test runner error: %s does not exist.\n%s\n' % (block_line, dir_name, block_line)) + return + msg = ("unknown type. \n%s\nshould be file or a directory.\n" % (dir_name)) + raise RuntimeError(msg) + if path_to_append is not None: + # Add it as the last one (so, first things are resolved against the default dirs and + # if none resolves, then we try a relative import). + sys.path.append(path_to_append) + + def __is_valid_py_file(self, fname): + """ tests that a particular file contains the proper file extension + and is not in the list of files to exclude """ + is_valid_fname = 0 + for invalid_fname in self.__class__.__exclude_files: + is_valid_fname += int(not fnmatch.fnmatch(fname, invalid_fname)) + if_valid_ext = 0 + for ext in self.__class__.__py_extensions: + if_valid_ext += int(fnmatch.fnmatch(fname, ext)) + return is_valid_fname > 0 and if_valid_ext > 0 + + def __unixify(self, s): + """ stupid windows. converts the backslash to forwardslash for consistency """ + return os.path.normpath(s).replace(os.sep, "/") + + def __importify(self, s, dir=False): + """ turns directory separators into dots and removes the ".py*" extension + so the string can be used as import statement """ + if not dir: + dirname, fname = os.path.split(s) + + if fname.count('.') > 1: + # if there's a file named xxx.xx.py, it is not a valid module, so, let's not load it... + return + + imp_stmt_pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]] + + if len(imp_stmt_pieces[0]) == 0: + imp_stmt_pieces = imp_stmt_pieces[1:] + + return ".".join(imp_stmt_pieces) + + else: # handle dir + return s.replace("\\", "/").replace("/", ".") + + def __add_files(self, pyfiles, root, files): + """ if files match, appends them to pyfiles. 
used by os.path.walk fcn """ + for fname in files: + if self.__is_valid_py_file(fname): + name_without_base_dir = self.__unixify(os.path.join(root, fname)) + pyfiles.append(name_without_base_dir) + + def find_import_files(self): + """ return a list of files to import """ + if self.files_to_tests: + pyfiles = self.files_to_tests.keys() + else: + pyfiles = [] + + for base_dir in self.files_or_dirs: + if os.path.isdir(base_dir): + for root, dirs, files in os.walk(base_dir): + # Note: handling directories that should be excluded from the search because + # they don't have __init__.py + exclude = {} + for d in dirs: + for init in ['__init__.py', '__init__.pyo', '__init__.pyc', '__init__.pyw', '__init__$py.class']: + if os.path.exists(os.path.join(root, d, init).replace('\\', '/')): + break + else: + exclude[d] = 1 + + if exclude: + new = [] + for d in dirs: + if d not in exclude: + new.append(d) + + dirs[:] = new + + self.__add_files(pyfiles, root, files) + + elif os.path.isfile(base_dir): + pyfiles.append(base_dir) + + if self.configuration.exclude_files or self.configuration.include_files: + ret = [] + for f in pyfiles: + add = True + basename = os.path.basename(f) + if self.configuration.include_files: + add = False + + for pat in self.configuration.include_files: + if fnmatch.fnmatchcase(basename, pat): + add = True + break + + if not add: + if self.verbosity > 3: + sys.stdout.write('Skipped file: %s (did not match any include_files pattern: %s)\n' % (f, self.configuration.include_files)) + + elif self.configuration.exclude_files: + for pat in self.configuration.exclude_files: + if fnmatch.fnmatchcase(basename, pat): + if self.verbosity > 3: + sys.stdout.write('Skipped file: %s (matched exclude_files pattern: %s)\n' % (f, pat)) + + elif self.verbosity > 2: + sys.stdout.write('Skipped file: %s\n' % (f,)) + + add = False + break + + if add: + if self.verbosity > 3: + sys.stdout.write('Adding file: %s for test discovery.\n' % (f,)) + ret.append(f) + + pyfiles = ret + + return pyfiles + + def __get_module_from_str(self, modname, print_exception, pyfile): + """ Import the module in the given import path. + * Returns the "final" module, so importing "coilib40.subject.visu" + returns the "visu" module, not the "coilib40" as returned by __import__ """ + try: + mod = __import__(modname) + for part in modname.split('.')[1:]: + mod = getattr(mod, part) + return mod + except: + if print_exception: + from _pydev_runfiles import pydev_runfiles_xml_rpc + from _pydevd_bundle import pydevd_io + buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr') + buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout') + try: + import traceback;traceback.print_exc() + sys.stderr.write('ERROR: Module: %s could not be imported (file: %s).\n' % (modname, pyfile)) + finally: + pydevd_io.end_redirect('stderr') + pydevd_io.end_redirect('stdout') + + pydev_runfiles_xml_rpc.notifyTest( + 'error', buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0) + + return None + + def remove_duplicates_keeping_order(self, seq): + seen = set() + seen_add = seen.add + return [x for x in seq if not (x in seen or seen_add(x))] + + def find_modules_from_files(self, pyfiles): + """ returns a list of modules given a list of files """ + # let's make sure that the paths we want are in the pythonpath... 
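+ # Each file is paired with its dotted import path (None when the name cannot be
+ # a valid module); sys.path entries are normalized and de-duplicated so candidate
+ # module names can be derived by stripping the matching prefix.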
+ imports = [(s, self.__importify(s)) for s in pyfiles] + + sys_path = [os.path.normpath(path) for path in sys.path] + sys_path = self.remove_duplicates_keeping_order(sys_path) + + system_paths = [] + for s in sys_path: + system_paths.append(self.__importify(s, True)) + + ret = [] + for pyfile, imp in imports: + if imp is None: + continue # can happen if a file is not a valid module + choices = [] + for s in system_paths: + if imp.startswith(s): + add = imp[len(s) + 1:] + if add: + choices.append(add) + # sys.stdout.write(' ' + add + ' ') + + if not choices: + sys.stdout.write('PYTHONPATH not found for file: %s\n' % imp) + else: + for i, import_str in enumerate(choices): + print_exception = i == len(choices) - 1 + mod = self.__get_module_from_str(import_str, print_exception, pyfile) + if mod is not None: + ret.append((pyfile, mod, import_str)) + break + + return ret + + #=================================================================================================================== + # GetTestCaseNames + #=================================================================================================================== + class GetTestCaseNames: + """Yes, we need a class for that (cannot use outer context on jython 2.1)""" + + def __init__(self, accepted_classes, accepted_methods): + self.accepted_classes = accepted_classes + self.accepted_methods = accepted_methods + + def __call__(self, testCaseClass): + """Return a sorted sequence of method names found within testCaseClass""" + testFnNames = [] + className = testCaseClass.__name__ + + if className in self.accepted_classes: + for attrname in dir(testCaseClass): + # If a class is chosen, we select all the 'test' methods' + if attrname.startswith('test') and hasattr(getattr(testCaseClass, attrname), '__call__'): + testFnNames.append(attrname) + + else: + for attrname in dir(testCaseClass): + # If we have the class+method name, we must do a full check and have an exact match. + if className + '.' + attrname in self.accepted_methods: + if hasattr(getattr(testCaseClass, attrname), '__call__'): + testFnNames.append(attrname) + + # sorted() is not available in jython 2.1 + testFnNames.sort() + return testFnNames + + def _decorate_test_suite(self, suite, pyfile, module_name): + import unittest + if isinstance(suite, unittest.TestSuite): + add = False + suite.__pydev_pyfile__ = pyfile + suite.__pydev_module_name__ = module_name + + for t in suite._tests: + t.__pydev_pyfile__ = pyfile + t.__pydev_module_name__ = module_name + if self._decorate_test_suite(t, pyfile, module_name): + add = True + + return add + + elif isinstance(suite, unittest.TestCase): + return True + + else: + return False + + def find_tests_from_modules(self, file_and_modules_and_module_name): + """ returns the unittests given a list of modules """ + # Use our own suite! 
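+ # The standard unittest.TestLoader is used, but its suiteClass is swapped for
+ # PydevTestSuite so every collected suite is the pydev implementation.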
+ from _pydev_runfiles import pydev_runfiles_unittest + import unittest + unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite + loader = unittest.TestLoader() + + ret = [] + if self.files_to_tests: + for pyfile, m, module_name in file_and_modules_and_module_name: + accepted_classes = {} + accepted_methods = {} + tests = self.files_to_tests[pyfile] + for t in tests: + accepted_methods[t] = t + + loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods) + + suite = loader.loadTestsFromModule(m) + if self._decorate_test_suite(suite, pyfile, module_name): + ret.append(suite) + return ret + + if self.tests: + accepted_classes = {} + accepted_methods = {} + + for t in self.tests: + splitted = t.split('.') + if len(splitted) == 1: + accepted_classes[t] = t + + elif len(splitted) == 2: + accepted_methods[t] = t + + loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods) + + for pyfile, m, module_name in file_and_modules_and_module_name: + suite = loader.loadTestsFromModule(m) + if self._decorate_test_suite(suite, pyfile, module_name): + ret.append(suite) + + return ret + + def filter_tests(self, test_objs, internal_call=False): + """ based on a filter name, only return those tests that have + the test case names that match """ + import unittest + if not internal_call: + if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests: + # No need to filter if we have nothing to filter! + return test_objs + + if self.verbosity > 1: + if self.configuration.include_tests: + sys.stdout.write('Tests to include: %s\n' % (self.configuration.include_tests,)) + + if self.tests: + sys.stdout.write('Tests to run: %s\n' % (self.tests,)) + + if self.configuration.exclude_tests: + sys.stdout.write('Tests to exclude: %s\n' % (self.configuration.exclude_tests,)) + + test_suite = [] + for test_obj in test_objs: + + if isinstance(test_obj, unittest.TestSuite): + # Note: keep the suites as they are and just 'fix' the tests (so, don't use the iter_tests). + if test_obj._tests: + test_obj._tests = self.filter_tests(test_obj._tests, True) + if test_obj._tests: # Only add the suite if we still have tests there. + test_suite.append(test_obj) + + elif isinstance(test_obj, unittest.TestCase): + try: + testMethodName = test_obj._TestCase__testMethodName + except AttributeError: + # changed in python 2.5 + testMethodName = test_obj._testMethodName + + add = True + if self.configuration.exclude_tests: + for pat in self.configuration.exclude_tests: + if fnmatch.fnmatchcase(testMethodName, pat): + if self.verbosity > 3: + sys.stdout.write('Skipped test: %s (matched exclude_tests pattern: %s)\n' % (testMethodName, pat)) + + elif self.verbosity > 2: + sys.stdout.write('Skipped test: %s\n' % (testMethodName,)) + + add = False + break + + if add: + if self.__match_tests(self.tests, test_obj, testMethodName): + include = True + if self.configuration.include_tests: + include = False + for pat in self.configuration.include_tests: + if fnmatch.fnmatchcase(testMethodName, pat): + include = True + break + if include: + test_suite.append(test_obj) + else: + if self.verbosity > 3: + sys.stdout.write('Skipped test: %s (did not match any include_tests pattern %s)\n' % ( + testMethodName, self.configuration.include_tests,)) + return test_suite + + def iter_tests(self, test_objs): + # Note: not using yield because of Jython 2.1. 
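+ # (suites are flattened recursively into a plain list of TestCase instances instead)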
+ import unittest + tests = [] + for test_obj in test_objs: + if isinstance(test_obj, unittest.TestSuite): + tests.extend(self.iter_tests(test_obj._tests)) + + elif isinstance(test_obj, unittest.TestCase): + tests.append(test_obj) + return tests + + def list_test_names(self, test_objs): + names = [] + for tc in self.iter_tests(test_objs): + try: + testMethodName = tc._TestCase__testMethodName + except AttributeError: + # changed in python 2.5 + testMethodName = tc._testMethodName + names.append(testMethodName) + return names + + def __match_tests(self, tests, test_case, test_method_name): + if not tests: + return 1 + + for t in tests: + class_and_method = t.split('.') + if len(class_and_method) == 1: + # only class name + if class_and_method[0] == test_case.__class__.__name__: + return 1 + + elif len(class_and_method) == 2: + if class_and_method[0] == test_case.__class__.__name__ and class_and_method[1] == test_method_name: + return 1 + + return 0 + + def __match(self, filter_list, name): + """ returns whether a test name matches the test filter """ + if filter_list is None: + return 1 + for f in filter_list: + if re.match(f, name): + return 1 + return 0 + + def run_tests(self, handle_coverage=True): + """ runs all tests """ + sys.stdout.write("Finding files... ") + files = self.find_import_files() + if self.verbosity > 3: + sys.stdout.write('%s ... done.\n' % (self.files_or_dirs)) + else: + sys.stdout.write('done.\n') + sys.stdout.write("Importing test modules ... ") + + if handle_coverage: + coverage_files, coverage = start_coverage_support(self.configuration) + + file_and_modules_and_module_name = self.find_modules_from_files(files) + sys.stdout.write("done.\n") + + all_tests = self.find_tests_from_modules(file_and_modules_and_module_name) + all_tests = self.filter_tests(all_tests) + + from _pydev_runfiles import pydev_runfiles_unittest + test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests) + from _pydev_runfiles import pydev_runfiles_xml_rpc + pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases()) + + start_time = time.time() + + def run_tests(): + executed_in_parallel = False + if self.jobs > 1: + from _pydev_runfiles import pydev_runfiles_parallel + + # What may happen is that the number of jobs needed is lower than the number of jobs requested + # (e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will + # return False and won't run any tests. + executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel( + all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include) + + if not executed_in_parallel: + # If in coverage, we don't need to pass anything here (coverage is already enabled for this execution). + runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity) + sys.stdout.write('\n') + runner.run(test_suite) + + if self.configuration.django: + get_django_test_suite_runner()(run_tests).run_tests([]) + else: + run_tests() + + if handle_coverage: + coverage.stop() + coverage.save() + + total_time = 'Finished in: %.2f secs.' 
% (time.time() - start_time,) + pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time) + + +DJANGO_TEST_SUITE_RUNNER = None + + +def get_django_test_suite_runner(): + global DJANGO_TEST_SUITE_RUNNER + if DJANGO_TEST_SUITE_RUNNER: + return DJANGO_TEST_SUITE_RUNNER + try: + # django >= 1.8 + import django + from django.test.runner import DiscoverRunner + + class MyDjangoTestSuiteRunner(DiscoverRunner): + + def __init__(self, on_run_suite): + django.setup() + DiscoverRunner.__init__(self) + self.on_run_suite = on_run_suite + + def build_suite(self, *args, **kwargs): + pass + + def suite_result(self, *args, **kwargs): + pass + + def run_suite(self, *args, **kwargs): + self.on_run_suite() + + except: + # django < 1.8 + try: + from django.test.simple import DjangoTestSuiteRunner + except: + + class DjangoTestSuiteRunner: + + def __init__(self): + pass + + def run_tests(self, *args, **kwargs): + raise AssertionError("Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported.") + + class MyDjangoTestSuiteRunner(DjangoTestSuiteRunner): + + def __init__(self, on_run_suite): + DjangoTestSuiteRunner.__init__(self) + self.on_run_suite = on_run_suite + + def build_suite(self, *args, **kwargs): + pass + + def suite_result(self, *args, **kwargs): + pass + + def run_suite(self, *args, **kwargs): + self.on_run_suite() + + DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner + return DJANGO_TEST_SUITE_RUNNER + + +#======================================================================================================================= +# main +#======================================================================================================================= +def main(configuration): + PydevTestRunner(configuration).run_tests() diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py new file mode 120000 index 0000000..7e84f55 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b1/68/c0/446b75fb84995d80cd54c2f6d1b59ddc8046e6cb4f82ced0dc29b8b9bf \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_nose.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_nose.py new file mode 100644 index 0000000..20ea5b2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_nose.py @@ -0,0 +1,207 @@ +from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport +from nose.plugins.base import Plugin # @UnresolvedImport +import sys +from _pydev_runfiles import pydev_runfiles_xml_rpc +import time +from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support +from contextlib import contextmanager +from io import StringIO +import traceback + + +#======================================================================================================================= +# PydevPlugin +#======================================================================================================================= +class PydevPlugin(Plugin): + + def __init__(self, configuration): + self.configuration = configuration + Plugin.__init__(self) + + def begin(self): + # Called before any test is run (it's 
always called, with multiprocess or not) + self.start_time = time.time() + self.coverage_files, self.coverage = start_coverage_support(self.configuration) + + def finalize(self, result): + # Called after all tests are run (it's always called, with multiprocess or not) + self.coverage.stop() + self.coverage.save() + + pydev_runfiles_xml_rpc.notifyTestRunFinished('Finished in: %.2f secs.' % (time.time() - self.start_time,)) + + #=================================================================================================================== + # Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate + # so that they're called, but unfortunately we loose some info -- i.e.: the time for each test in this + # process). + #=================================================================================================================== + + class Sentinel(object): + pass + + @contextmanager + def _without_user_address(self, test): + # #PyDev-1095: Conflict between address in test and test.address() in PydevPlugin().report_cond() + user_test_instance = test.test + user_address = self.Sentinel + user_class_address = self.Sentinel + try: + if 'address' in user_test_instance.__dict__: + user_address = user_test_instance.__dict__.pop('address') + except: + # Just ignore anything here. + pass + try: + user_class_address = user_test_instance.__class__.address + del user_test_instance.__class__.address + except: + # Just ignore anything here. + pass + + try: + yield + finally: + if user_address is not self.Sentinel: + user_test_instance.__dict__['address'] = user_address + + if user_class_address is not self.Sentinel: + user_test_instance.__class__.address = user_class_address + + def _get_test_address(self, test): + try: + if hasattr(test, 'address'): + with self._without_user_address(test): + address = test.address() + + # test.address() is something as: + # ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1') + # + # and we must pass: location, test + # E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1'] + address = address[0], address[2] + else: + # multiprocess + try: + address = test[0], test[1] + except TypeError: + # It may be an error at setup, in which case it's not really a test, but a Context object. + f = test.context.__file__ + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + address = f, '?' + except: + sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n") + traceback.print_exc() + sys.stderr.write("\n\n\n") + address = '?', '?' + return address + + def report_cond(self, cond, test, captured_output, error=''): + ''' + @param cond: fail, error, ok + ''' + + address = self._get_test_address(test) + + error_contents = self.get_io_from_error(error) + try: + time_str = '%.2f' % (time.time() - test._pydev_start_time) + except: + time_str = '?' 
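The _without_user_address context manager above is a small reusable pattern: temporarily remove an attribute that shadows a method, run some code, and restore the attribute in a finally block, using a sentinel to tell "absent" apart from "set to a falsy value". A simplified standalone version; the class and attribute names here are illustrative, not part of pydevd:

from contextlib import contextmanager

_MISSING = object()  # sentinel: distinguishes "attribute absent" from "attribute set to None"

@contextmanager
def without_instance_attr(obj, name):
    # Temporarily drop an instance attribute so the class-level definition is visible again.
    saved = obj.__dict__.pop(name, _MISSING)
    try:
        yield
    finally:
        if saved is not _MISSING:
            obj.__dict__[name] = saved

class SomeTest(object):
    def address(self):
        return ('file.py', 'SomeTest.test_met')

t = SomeTest()
t.address = ('shadowing value',)        # instance attribute hides the method
with without_instance_attr(t, 'address'):
    print(t.address())                  # the method is callable again inside the block
print(t.address)                        # ('shadowing value',) -- restored afterwards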
+ + pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str) + + def startTest(self, test): + test._pydev_start_time = time.time() + file, test = self._get_test_address(test) + pydev_runfiles_xml_rpc.notifyStartTest(file, test) + + def get_io_from_error(self, err): + if type(err) == type(()): + if len(err) != 3: + if len(err) == 2: + return err[1] # multiprocess + s = StringIO() + etype, value, tb = err + if isinstance(value, str): + return value + traceback.print_exception(etype, value, tb, file=s) + return s.getvalue() + return err + + def get_captured_output(self, test): + if hasattr(test, 'capturedOutput') and test.capturedOutput: + return test.capturedOutput + return '' + + def addError(self, test, err): + self.report_cond( + 'error', + test, + self.get_captured_output(test), + err, + ) + + def addFailure(self, test, err): + self.report_cond( + 'fail', + test, + self.get_captured_output(test), + err, + ) + + def addSuccess(self, test): + self.report_cond( + 'ok', + test, + self.get_captured_output(test), + '', + ) + + +PYDEV_NOSE_PLUGIN_SINGLETON = None + + +def start_pydev_nose_plugin_singleton(configuration): + global PYDEV_NOSE_PLUGIN_SINGLETON + PYDEV_NOSE_PLUGIN_SINGLETON = PydevPlugin(configuration) + return PYDEV_NOSE_PLUGIN_SINGLETON + + +original = MultiProcessTestRunner.consolidate + + +#======================================================================================================================= +# new_consolidate +#======================================================================================================================= +def new_consolidate(self, result, batch_result): + ''' + Used so that it can work with the multiprocess plugin. + Monkeypatched because nose seems a bit unsupported at this time (ideally + the plugin would have this support by default). 
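new_consolidate, defined next, follows a common monkey-patching shape: keep a module-level reference to the original method, define a wrapper that calls it and adds reporting, and re-assign the wrapper on the class. A generic sketch of that shape, using a stand-in Runner class rather than the nose API:

class Runner(object):
    def consolidate(self, result, batch_result):
        # Stand-in for the real method: merge a batch into the overall result.
        result.extend(batch_result)
        return result

_original_consolidate = Runner.consolidate

def patched_consolidate(self, result, batch_result):
    ret = _original_consolidate(self, result, batch_result)
    # Behaviour added around the original call, e.g. per-batch reporting.
    print('consolidated %d new entries' % (len(batch_result),))
    return ret

Runner.consolidate = patched_consolidate

print(Runner().consolidate([1], [2, 3]))   # prints the report line, then [1, 2, 3]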
+ ''' + ret = original(self, result, batch_result) + + parent_frame = sys._getframe().f_back + # addr is something as D:\pytesting1\src\mod1\hello.py:TestCase.testMet4 + # so, convert it to what report_cond expects + addr = parent_frame.f_locals['addr'] + i = addr.rindex(':') + addr = [addr[:i], addr[i + 1:]] + + output, testsRun, failures, errors, errorClasses = batch_result + if failures or errors: + for failure in failures: + PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('fail', addr, output, failure) + + for error in errors: + PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('error', addr, output, error) + else: + PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('ok', addr, output) + + return ret + + +MultiProcessTestRunner.consolidate = new_consolidate diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py new file mode 100644 index 0000000..b34c45e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py @@ -0,0 +1,267 @@ +import unittest +from _pydev_bundle._pydev_saved_modules import thread +import queue as Queue +from _pydev_runfiles import pydev_runfiles_xml_rpc +import time +import os +import threading +import sys + + +#======================================================================================================================= +# flatten_test_suite +#======================================================================================================================= +def flatten_test_suite(test_suite, ret): + if isinstance(test_suite, unittest.TestSuite): + for t in test_suite._tests: + flatten_test_suite(t, ret) + + elif isinstance(test_suite, unittest.TestCase): + ret.append(test_suite) + + +#======================================================================================================================= +# execute_tests_in_parallel +#======================================================================================================================= +def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include): + ''' + @param tests: list(PydevTestSuite) + A list with the suites to be run + + @param split: str + Either 'module' or the number of tests that should be run in each batch + + @param coverage_files: list(file) + A list with the files that should be used for giving coverage information (if empty, coverage information + should not be gathered). + + @param coverage_include: str + The pattern that should be included in the coverage. + + @return: bool + Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1 + should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be + run. + + It may also return False if in debug mode (in which case, multi-processes are not accepted) + ''' + try: + from _pydevd_bundle.pydevd_comm import get_global_debugger + if get_global_debugger() is not None: + return False + except: + pass # Ignore any error here. + + # This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When + # split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests + # from a given module. 
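The comment above describes how work is batched before being handed to the parallel jobs: with split == 'module', flattened test cases are grouped by the module they came from, so each job receives whole modules. A small standalone sketch of that grouping; the module and test names are placeholders:

def group_by_module(test_cases):
    # test_cases: iterable of (module_name, test_name) pairs.
    batches = {}
    for module_name, test_name in test_cases:
        batches.setdefault(module_name, []).append(test_name)
    # Each value becomes one work item handed to a parallel job.
    return list(batches.values())

cases = [('mod1', 'TestA.test_1'), ('mod1', 'TestA.test_2'), ('mod2', 'TestB.test_1')]
print(group_by_module(cases))
# -> [['TestA.test_1', 'TestA.test_2'], ['TestB.test_1']]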
+ tests_queue = [] + + queue_elements = [] + if split == 'module': + module_to_tests = {} + for test in tests: + lst = [] + flatten_test_suite(test, lst) + for test in lst: + key = (test.__pydev_pyfile__, test.__pydev_module_name__) + module_to_tests.setdefault(key, []).append(test) + + for key, tests in module_to_tests.items(): + queue_elements.append(tests) + + if len(queue_elements) < jobs: + # Don't create jobs we will never use. + jobs = len(queue_elements) + + elif split == 'tests': + for test in tests: + lst = [] + flatten_test_suite(test, lst) + for test in lst: + queue_elements.append([test]) + + if len(queue_elements) < jobs: + # Don't create jobs we will never use. + jobs = len(queue_elements) + + else: + raise AssertionError('Do not know how to handle: %s' % (split,)) + + for test_cases in queue_elements: + test_queue_elements = [] + for test_case in test_cases: + try: + test_name = test_case.__class__.__name__ + "." + test_case._testMethodName + except AttributeError: + # Support for jython 2.1 (__testMethodName is pseudo-private in the test case) + test_name = test_case.__class__.__name__ + "." + test_case._TestCase__testMethodName + + test_queue_elements.append(test_case.__pydev_pyfile__ + '|' + test_name) + + tests_queue.append(test_queue_elements) + + if jobs < 2: + return False + + sys.stdout.write('Running tests in parallel with: %s jobs.\n' % (jobs,)) + + queue = Queue.Queue() + for item in tests_queue: + queue.put(item, block=False) + + providers = [] + clients = [] + for i in range(jobs): + test_cases_provider = CommunicationThread(queue) + providers.append(test_cases_provider) + + test_cases_provider.start() + port = test_cases_provider.port + + if coverage_files: + clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include)) + else: + clients.append(ClientThread(i, port, verbosity)) + + for client in clients: + client.start() + + client_alive = True + while client_alive: + client_alive = False + for client in clients: + # Wait for all the clients to exit. + if not client.finished: + client_alive = True + time.sleep(.2) + break + + for provider in providers: + provider.shutdown() + + return True + + +#======================================================================================================================= +# CommunicationThread +#======================================================================================================================= +class CommunicationThread(threading.Thread): + + def __init__(self, tests_queue): + threading.Thread.__init__(self) + self.daemon = True + self.queue = tests_queue + self.finished = False + from _pydev_bundle.pydev_imports import SimpleXMLRPCServer + from _pydev_bundle import pydev_localhost + + # Create server + server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False) + server.register_function(self.GetTestsToRun) + server.register_function(self.notifyStartTest) + server.register_function(self.notifyTest) + server.register_function(self.notifyCommands) + self.port = server.socket.getsockname()[1] + self.server = server + + def GetTestsToRun(self, job_id): + ''' + @param job_id: + + @return: list(str) + Each entry is a string in the format: filename|Test.testName + ''' + try: + ret = self.queue.get(block=False) + return ret + except: # Any exception getting from the queue (empty or not) means we finished our work on providing the tests. + self.finished = True + return [] + + def notifyCommands(self, job_id, commands): + # Batch notification. 
+ for command in commands: + getattr(self, command[0])(job_id, *command[1], **command[2]) + + return True + + def notifyStartTest(self, job_id, *args, **kwargs): + pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs) + return True + + def notifyTest(self, job_id, *args, **kwargs): + pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs) + return True + + def shutdown(self): + if hasattr(self.server, 'shutdown'): + self.server.shutdown() + else: + self._shutdown = True + + def run(self): + if hasattr(self.server, 'shutdown'): + self.server.serve_forever() + else: + self._shutdown = False + while not self._shutdown: + self.server.handle_request() + + +#======================================================================================================================= +# Client +#======================================================================================================================= +class ClientThread(threading.Thread): + + def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None): + threading.Thread.__init__(self) + self.daemon = True + self.port = port + self.job_id = job_id + self.verbosity = verbosity + self.finished = False + self.coverage_output_file = coverage_output_file + self.coverage_include = coverage_include + + def _reader_thread(self, pipe, target): + while True: + target.write(pipe.read(1)) + + def run(self): + try: + from _pydev_runfiles import pydev_runfiles_parallel_client + # TODO: Support Jython: + # + # For jython, instead of using sys.executable, we should use: + # r'D:\bin\jdk_1_5_09\bin\java.exe', + # '-classpath', + # 'D:/bin/jython-2.2.1/jython.jar', + # 'org.python.util.jython', + + args = [ + sys.executable, + pydev_runfiles_parallel_client.__file__, + str(self.job_id), + str(self.port), + str(self.verbosity), + ] + + if self.coverage_output_file and self.coverage_include: + args.append(self.coverage_output_file) + args.append(self.coverage_include) + + import subprocess + if False: + proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + thread.start_new_thread(self._reader_thread, (proc.stdout, sys.stdout)) + + thread.start_new_thread(target=self._reader_thread, args=(proc.stderr, sys.stderr)) + else: + proc = subprocess.Popen(args, env=os.environ, shell=False) + proc.wait() + + finally: + self.finished = True + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py new file mode 120000 index 0000000..7ea5de0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c3/09/7c/501a46452a93bf0f1509d9764bb1b43be4b25ed3a654520f2b3e2887d1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py new file mode 100644 index 0000000..793097d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py @@ -0,0 +1,306 @@ +from _pydev_runfiles import pydev_runfiles_xml_rpc +import pickle +import zlib +import base64 +import os +from pydevd_file_utils import canonical_normalized_path +import pytest +import sys +import time +from pathlib import 
Path + +#========================================================================= +# Load filters with tests we should skip +#========================================================================= +py_test_accept_filter = None + + +def _load_filters(): + global py_test_accept_filter + if py_test_accept_filter is None: + py_test_accept_filter = os.environ.get('PYDEV_PYTEST_SKIP') + if py_test_accept_filter: + py_test_accept_filter = pickle.loads( + zlib.decompress(base64.b64decode(py_test_accept_filter))) + + # Newer versions of pytest resolve symlinks, so, we + # may need to filter with a resolved path too. + new_dct = {} + for filename, value in py_test_accept_filter.items(): + new_dct[canonical_normalized_path(str(Path(filename).resolve()))] = value + + py_test_accept_filter.update(new_dct) + + else: + py_test_accept_filter = {} + + +def is_in_xdist_node(): + main_pid = os.environ.get('PYDEV_MAIN_PID') + if main_pid and main_pid != str(os.getpid()): + return True + return False + + +connected = False + + +def connect_to_server_for_communication_to_xml_rpc_on_xdist(): + global connected + if connected: + return + connected = True + if is_in_xdist_node(): + port = os.environ.get('PYDEV_PYTEST_SERVER') + if not port: + sys.stderr.write( + 'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n') + else: + pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True) + + +PY2 = sys.version_info[0] <= 2 +PY3 = not PY2 + + +class State: + start_time = time.time() + buf_err = None + buf_out = None + + +def start_redirect(): + if State.buf_out is not None: + return + from _pydevd_bundle import pydevd_io + State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr') + State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout') + + +def get_curr_output(): + buf_out = State.buf_out + buf_err = State.buf_err + return buf_out.getvalue() if buf_out is not None else '', buf_err.getvalue() if buf_err is not None else '' + + +def pytest_unconfigure(): + if is_in_xdist_node(): + return + # Only report that it finished when on the main node (we don't want to report + # the finish on each separate node). + pydev_runfiles_xml_rpc.notifyTestRunFinished( + 'Finished in: %.2f secs.' % (time.time() - State.start_time,)) + + +def pytest_collection_modifyitems(session, config, items): + # A note: in xdist, this is not called on the main process, only in the + # secondary nodes, so, we'll actually make the filter and report it multiple + # times. 
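_load_filters above expects the accepted-tests mapping to arrive in the PYDEV_PYTEST_SKIP environment variable as a base64-encoded, zlib-compressed pickle. A round-trip sketch of that encoding; the file path and test names are placeholders:

import base64
import os
import pickle
import zlib

accept_filter = {'/tmp/test_example.py': ['TestCase.test_one', 'TestCase.test_two']}

# Producer side: pickle -> compress -> base64 so the mapping survives as env-var text.
encoded = base64.b64encode(zlib.compress(pickle.dumps(accept_filter))).decode('ascii')
os.environ['PYDEV_PYTEST_SKIP'] = encoded

# Consumer side: effectively what _load_filters does with the variable.
decoded = pickle.loads(zlib.decompress(base64.b64decode(os.environ['PYDEV_PYTEST_SKIP'])))
assert decoded == accept_filter
print(decoded)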
+ connect_to_server_for_communication_to_xml_rpc_on_xdist() + + _load_filters() + if not py_test_accept_filter: + pydev_runfiles_xml_rpc.notifyTestsCollected(len(items)) + return # Keep on going (nothing to filter) + + new_items = [] + for item in items: + f = canonical_normalized_path(str(item.parent.fspath)) + name = item.name + + if f not in py_test_accept_filter: + # print('Skip file: %s' % (f,)) + continue # Skip the file + + i = name.find('[') + name_without_parametrize = None + if i > 0: + name_without_parametrize = name[:i] + + accept_tests = py_test_accept_filter[f] + + if item.cls is not None: + class_name = item.cls.__name__ + else: + class_name = None + for test in accept_tests: + if test == name: + # Direct match of the test (just go on with the default + # loading) + new_items.append(item) + break + + if name_without_parametrize is not None and test == name_without_parametrize: + # This happens when parameterizing pytest tests on older versions + # of pytest where the test name doesn't include the fixture name + # in it. + new_items.append(item) + break + + if class_name is not None: + if test == class_name + '.' + name: + new_items.append(item) + break + + if name_without_parametrize is not None and test == class_name + '.' + name_without_parametrize: + new_items.append(item) + break + + if class_name == test: + new_items.append(item) + break + else: + pass + # print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests)) + + # Modify the original list + items[:] = new_items + pydev_runfiles_xml_rpc.notifyTestsCollected(len(items)) + + +try: + """ + pytest > 5.4 uses own version of TerminalWriter based on py.io.TerminalWriter + and assumes there is a specific method TerminalWriter._write_source + so try load pytest version first or fallback to default one + """ + from _pytest._io import TerminalWriter +except ImportError: + from py.io import TerminalWriter + + +def _get_error_contents_from_report(report): + if report.longrepr is not None: + try: + tw = TerminalWriter(stringio=True) + stringio = tw.stringio + except TypeError: + import io + stringio = io.StringIO() + tw = TerminalWriter(file=stringio) + tw.hasmarkup = False + report.toterminal(tw) + exc = stringio.getvalue() + s = exc.strip() + if s: + return s + + return '' + + +def pytest_collectreport(report): + error_contents = _get_error_contents_from_report(report) + if error_contents: + report_test('fail', '', '', '', error_contents, 0.0) + + +def append_strings(s1, s2): + if s1.__class__ == s2.__class__: + return s1 + s2 + + # Prefer str + if isinstance(s1, bytes): + s1 = s1.decode('utf-8', 'replace') + + if isinstance(s2, bytes): + s2 = s2.decode('utf-8', 'replace') + + return s1 + s2 + + +def pytest_runtest_logreport(report): + if is_in_xdist_node(): + # When running with xdist, we don't want the report to be called from the node, only + # from the main process. + return + report_duration = report.duration + report_when = report.when + report_outcome = report.outcome + + if hasattr(report, 'wasxfail'): + if report_outcome != 'skipped': + report_outcome = 'passed' + + if report_outcome == 'passed': + # passed on setup/teardown: no need to report if in setup or teardown + # (only on the actual test if it passed). + if report_when in ('setup', 'teardown'): + return + + status = 'ok' + + elif report_outcome == 'skipped': + status = 'skip' + + else: + # It has only passed, skipped and failed (no error), so, let's consider + # error if not on call. 
+ if report_when in ('setup', 'teardown'): + status = 'error' + + else: + # any error in the call (not in setup or teardown) is considered a + # regular failure. + status = 'fail' + + # This will work if pytest is not capturing it, if it is, nothing will + # come from here... + captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '') + for type_section, value in report.sections: + if value: + if type_section in ('err', 'stderr', 'Captured stderr call'): + error_contents = append_strings(error_contents, value) + else: + captured_output = append_strings(error_contents, value) + + filename = getattr(report, 'pydev_fspath_strpath', '') + test = report.location[2] + + if report_outcome != 'skipped': + # On skipped, we'll have a traceback for the skip, which is not what we + # want. + exc = _get_error_contents_from_report(report) + if exc: + if error_contents: + error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n') + error_contents = append_strings(error_contents, exc) + + report_test(status, filename, test, captured_output, error_contents, report_duration) + + +def report_test(status, filename, test, captured_output, error_contents, duration): + ''' + @param filename: 'D:\\src\\mod1\\hello.py' + @param test: 'TestCase.testMet1' + @param status: fail, error, ok + ''' + time_str = '%.2f' % (duration,) + pydev_runfiles_xml_rpc.notifyTest( + status, captured_output, error_contents, filename, test, time_str) + + +if not hasattr(pytest, 'hookimpl'): + raise AssertionError('Please upgrade pytest (the current version of pytest: %s is unsupported)' % (pytest.__version__,)) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): + outcome = yield + report = outcome.get_result() + report.pydev_fspath_strpath = item.fspath.strpath + report.pydev_captured_output, report.pydev_error_contents = get_curr_output() + + +@pytest.mark.tryfirst +def pytest_runtest_setup(item): + ''' + Note: with xdist will be on a secondary process. + ''' + # We have our own redirection: if xdist does its redirection, we'll have + # nothing in our contents (which is OK), but if it does, we'll get nothing + # from pytest but will get our own here. 
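The redirection referred to above is pydevd's own stdout/stderr capture (pydevd_io.start_redirect). As a rough standard-library approximation of per-test output capture, not the pydevd mechanism itself:

import io
from contextlib import redirect_stdout, redirect_stderr

def run_captured(func):
    # Run func and return (result, captured stdout, captured stderr).
    out, err = io.StringIO(), io.StringIO()
    with redirect_stdout(out), redirect_stderr(err):
        result = func()
    return result, out.getvalue(), err.getvalue()

def sample_test():
    print('hello from the test')
    return 'ok'

result, captured_out, captured_err = run_captured(sample_test)
print(result, repr(captured_out), repr(captured_err))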
+ start_redirect() + filename = item.fspath.strpath + test = item.location[2] + + pydev_runfiles_xml_rpc.notifyStartTest(filename, test) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py new file mode 100644 index 0000000..fff1ef9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py @@ -0,0 +1,150 @@ +import unittest as python_unittest +from _pydev_runfiles import pydev_runfiles_xml_rpc +import time +from _pydevd_bundle import pydevd_io +import traceback +from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport +from io import StringIO + + +#======================================================================================================================= +# PydevTextTestRunner +#======================================================================================================================= +class PydevTextTestRunner(python_unittest.TextTestRunner): + + def _makeResult(self): + return PydevTestResult(self.stream, self.descriptions, self.verbosity) + + +_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__ + + +#======================================================================================================================= +# PydevTestResult +#======================================================================================================================= +class PydevTestResult(_PythonTextTestResult): + + def addSubTest(self, test, subtest, err): + """Called at the end of a subtest. + 'err' is None if the subtest ended successfully, otherwise it's a + tuple of values as returned by sys.exc_info(). + """ + _PythonTextTestResult.addSubTest(self, test, subtest, err) + if err is not None: + subdesc = subtest._subDescription() + error = (test, self._exc_info_to_string(err, test)) + self._reportErrors([error], [], '', '%s %s' % (self.get_test_name(test), subdesc)) + + def startTest(self, test): + _PythonTextTestResult.startTest(self, test) + self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std='both') + self.start_time = time.time() + self._current_errors_stack = [] + self._current_failures_stack = [] + + try: + test_name = test.__class__.__name__ + "." + test._testMethodName + except AttributeError: + # Support for jython 2.1 (__testMethodName is pseudo-private in the test case) + test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName + + pydev_runfiles_xml_rpc.notifyStartTest( + test.__pydev_pyfile__, test_name) + + def get_test_name(self, test): + try: + try: + test_name = test.__class__.__name__ + "." + test._testMethodName + except AttributeError: + # Support for jython 2.1 (__testMethodName is pseudo-private in the test case) + try: + test_name = test.__class__.__name__ + "." 
+ test._TestCase__testMethodName + # Support for class/module exceptions (test is instance of _ErrorHolder) + except: + test_name = test.description.split()[1][1:-1] + ' <' + test.description.split()[0] + '>' + except: + traceback.print_exc() + return '' + return test_name + + def stopTest(self, test): + end_time = time.time() + pydevd_io.end_redirect(std='both') + + _PythonTextTestResult.stopTest(self, test) + + captured_output = self.buf.getvalue() + del self.buf + error_contents = '' + test_name = self.get_test_name(test) + + diff_time = '%.2f' % (end_time - self.start_time) + + skipped = False + outcome = getattr(test, '_outcome', None) + if outcome is not None: + skipped = bool(getattr(outcome, 'skipped', None)) + + if skipped: + pydev_runfiles_xml_rpc.notifyTest( + 'skip', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + elif not self._current_errors_stack and not self._current_failures_stack: + pydev_runfiles_xml_rpc.notifyTest( + 'ok', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + else: + self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name) + + def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=''): + error_contents = [] + for test, s in errors + failures: + if type(s) == type((1,)): # If it's a tuple (for jython 2.1) + sio = StringIO() + traceback.print_exception(s[0], s[1], s[2], file=sio) + s = sio.getvalue() + error_contents.append(s) + + sep = '\n' + self.separator1 + error_contents = sep.join(error_contents) + + if errors and not failures: + try: + pydev_runfiles_xml_rpc.notifyTest( + 'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + except: + file_start = error_contents.find('File "') + file_end = error_contents.find('", ', file_start) + if file_start != -1 and file_end != -1: + file = error_contents[file_start + 6:file_end] + else: + file = '' + pydev_runfiles_xml_rpc.notifyTest( + 'error', captured_output, error_contents, file, test_name, diff_time) + + elif failures and not errors: + pydev_runfiles_xml_rpc.notifyTest( + 'fail', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + + else: # Ok, we got both, errors and failures. Let's mark it as an error in the end. 
+ pydev_runfiles_xml_rpc.notifyTest( + 'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + + def addError(self, test, err): + _PythonTextTestResult.addError(self, test, err) + # Support for class/module exceptions (test is instance of _ErrorHolder) + if not hasattr(self, '_current_errors_stack') or test.__class__.__name__ == '_ErrorHolder': + # Not in start...end, so, report error now (i.e.: django pre/post-setup) + self._reportErrors([self.errors[-1]], [], '', self.get_test_name(test)) + else: + self._current_errors_stack.append(self.errors[-1]) + + def addFailure(self, test, err): + _PythonTextTestResult.addFailure(self, test, err) + if not hasattr(self, '_current_failures_stack'): + # Not in start...end, so, report error now (i.e.: django pre/post-setup) + self._reportErrors([], [self.failures[-1]], '', self.get_test_name(test)) + else: + self._current_failures_stack.append(self.failures[-1]) + + +class PydevTestSuite(python_unittest.TestSuite): + pass diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py new file mode 100644 index 0000000..b4d6b5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py @@ -0,0 +1,257 @@ +import sys +import threading +import traceback +import warnings + +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydev_bundle.pydev_imports import xmlrpclib, _queue +from _pydevd_bundle.pydevd_constants import Null + +Queue = _queue.Queue + +# This may happen in IronPython (in Python it shouldn't happen as there are +# 'fast' replacements that are used in xmlrpclib.py) +warnings.filterwarnings( + 'ignore', 'The xmllib module is obsolete.*', DeprecationWarning) + +file_system_encoding = getfilesystemencoding() + + +#======================================================================================================================= +# _ServerHolder +#======================================================================================================================= +class _ServerHolder: + ''' + Helper so that we don't have to use a global here. 
+ ''' + SERVER = None + + +#======================================================================================================================= +# set_server +#======================================================================================================================= +def set_server(server): + _ServerHolder.SERVER = server + + +#======================================================================================================================= +# ParallelNotification +#======================================================================================================================= +class ParallelNotification(object): + + def __init__(self, method, args): + self.method = method + self.args = args + + def to_tuple(self): + return self.method, self.args + + +#======================================================================================================================= +# KillServer +#======================================================================================================================= +class KillServer(object): + pass + + +#======================================================================================================================= +# ServerFacade +#======================================================================================================================= +class ServerFacade(object): + + def __init__(self, notifications_queue): + self.notifications_queue = notifications_queue + + def notifyTestsCollected(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyTestsCollected', args)) + + def notifyConnected(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyConnected', args)) + + def notifyTestRunFinished(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyTestRunFinished', args)) + + def notifyStartTest(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyStartTest', args)) + + def notifyTest(self, *args): + new_args = [] + for arg in args: + new_args.append(_encode_if_needed(arg)) + args = tuple(new_args) + self.notifications_queue.put_nowait(ParallelNotification('notifyTest', args)) + + +#======================================================================================================================= +# ServerComm +#======================================================================================================================= +class ServerComm(threading.Thread): + + def __init__(self, notifications_queue, port, daemon=False): + threading.Thread.__init__(self) + self.setDaemon(daemon) # If False, wait for all the notifications to be passed before exiting! + self.finished = False + self.notifications_queue = notifications_queue + + from _pydev_bundle import pydev_localhost + + # It is necessary to specify an encoding, that matches + # the encoding of all bytes-strings passed into an + # XMLRPC call: "All 8-bit strings in the data structure are assumed to use the + # packet encoding. Unicode strings are automatically converted, + # where necessary." + # Byte strings most likely come from file names. + encoding = file_system_encoding + if encoding == "mbcs": + # Windos symbolic name for the system encoding CP_ACP. + # We need to convert it into a encoding that is recognized by Java. + # Unfortunately this is not always possible. You could use + # GetCPInfoEx and get a name similar to "windows-1251". Then + # you need a table to translate on a best effort basis. Much to complicated. 
+ # ISO-8859-1 is good enough. + encoding = "ISO-8859-1" + + self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port), + encoding=encoding) + + def run(self): + while True: + kill_found = False + commands = [] + command = self.notifications_queue.get(block=True) + if isinstance(command, KillServer): + kill_found = True + else: + assert isinstance(command, ParallelNotification) + commands.append(command.to_tuple()) + + try: + while True: + command = self.notifications_queue.get(block=False) # No block to create a batch. + if isinstance(command, KillServer): + kill_found = True + else: + assert isinstance(command, ParallelNotification) + commands.append(command.to_tuple()) + except: + pass # That's OK, we're getting it until it becomes empty so that we notify multiple at once. + + if commands: + try: + self.server.notifyCommands(commands) + except: + traceback.print_exc() + + if kill_found: + self.finished = True + return + + +#======================================================================================================================= +# initialize_server +#======================================================================================================================= +def initialize_server(port, daemon=False): + if _ServerHolder.SERVER is None: + if port is not None: + notifications_queue = Queue() + _ServerHolder.SERVER = ServerFacade(notifications_queue) + _ServerHolder.SERVER_COMM = ServerComm(notifications_queue, port, daemon) + _ServerHolder.SERVER_COMM.start() + else: + # Create a null server, so that we keep the interface even without any connection. + _ServerHolder.SERVER = Null() + _ServerHolder.SERVER_COMM = Null() + + try: + _ServerHolder.SERVER.notifyConnected() + except: + traceback.print_exc() + + +#======================================================================================================================= +# notifyTest +#======================================================================================================================= +def notifyTestsCollected(tests_count): + assert tests_count is not None + try: + _ServerHolder.SERVER.notifyTestsCollected(tests_count) + except: + traceback.print_exc() + + +#======================================================================================================================= +# notifyStartTest +#======================================================================================================================= +def notifyStartTest(file, test): + ''' + @param file: the tests file (c:/temp/test.py) + @param test: the test ran (i.e.: TestCase.test1) + ''' + assert file is not None + if test is None: + test = '' # Could happen if we have an import error importing module. 
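The arrangement above (ServerFacade putting ParallelNotification objects on a queue, ServerComm draining the queue and forwarding a single batched notifyCommands call) can be sketched without any XML-RPC at all; the FakeServer below is a stand-in for the real transport:

import queue
import threading

class FakeServer(object):
    def notifyCommands(self, commands):
        print('batch of %d command(s): %s' % (len(commands), commands))

def drain_and_send(q, server):
    # Block for the first notification, then take whatever else is already
    # queued so several notifications go out in a single call.
    commands = [q.get(block=True)]
    try:
        while True:
            commands.append(q.get(block=False))
    except queue.Empty:
        pass
    server.notifyCommands(commands)

q = queue.Queue()
q.put(('notifyStartTest', ('/tmp/test_example.py', 'TestCase.test_one')))
q.put(('notifyTest', ('ok', '', '', '/tmp/test_example.py', 'TestCase.test_one', '0.01')))

worker = threading.Thread(target=drain_and_send, args=(q, FakeServer()))
worker.start()
worker.join()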
+ + try: + _ServerHolder.SERVER.notifyStartTest(file, test) + except: + traceback.print_exc() + + +def _encode_if_needed(obj): + # In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object)) + if isinstance(obj, str): # Unicode in py3 + return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace')) + + elif isinstance(obj, bytes): + try: + return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace')) + except: + return xmlrpclib.Binary(obj) # bytes already + + return obj + + +#======================================================================================================================= +# notifyTest +#======================================================================================================================= +def notifyTest(cond, captured_output, error_contents, file, test, time): + ''' + @param cond: ok, fail, error + @param captured_output: output captured from stdout + @param captured_output: output captured from stderr + @param file: the tests file (c:/temp/test.py) + @param test: the test ran (i.e.: TestCase.test1) + @param time: float with the number of seconds elapsed + ''' + assert cond is not None + assert captured_output is not None + assert error_contents is not None + assert file is not None + if test is None: + test = '' # Could happen if we have an import error importing module. + assert time is not None + try: + captured_output = _encode_if_needed(captured_output) + error_contents = _encode_if_needed(error_contents) + + _ServerHolder.SERVER.notifyTest(cond, captured_output, error_contents, file, test, time) + except: + traceback.print_exc() + + +#======================================================================================================================= +# notifyTestRunFinished +#======================================================================================================================= +def notifyTestRunFinished(total_time): + assert total_time is not None + try: + _ServerHolder.SERVER.notifyTestRunFinished(total_time) + except: + traceback.print_exc() + + +#======================================================================================================================= +# force_server_kill +#======================================================================================================================= +def force_server_kill(): + _ServerHolder.SERVER_COMM.notifications_queue.put_nowait(KillServer()) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..94c643c Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/__init__.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevconsole_code.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevconsole_code.cpython-38.pyc new file mode 100644 index 0000000..35738b5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevconsole_code.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info.cpython-38.pyc new file mode 100644 index 0000000..b111b88 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info_regular.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info_regular.cpython-38.pyc new file mode 100644 index 0000000..39877e1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_additional_thread_info_regular.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_api.cpython-38.pyc new file mode 100644 index 0000000..6e2613b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_breakpoints.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_breakpoints.cpython-38.pyc new file mode 100644 index 0000000..b4262a8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_breakpoints.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_bytecode_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_bytecode_utils.cpython-38.pyc new file mode 100644 index 0000000..c246f0c Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_bytecode_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_code_to_source.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_code_to_source.cpython-38.pyc new file mode 100644 index 0000000..3281951 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_code_to_source.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_collect_bytecode_info.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_collect_bytecode_info.cpython-38.pyc new file mode 100644 index 0000000..2d20460 
Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_collect_bytecode_info.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm.cpython-38.pyc new file mode 100644 index 0000000..6e5a06e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm_constants.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm_constants.cpython-38.pyc new file mode 100644 index 0000000..4c4bb9e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_comm_constants.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_command_line_handling.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_command_line_handling.cpython-38.pyc new file mode 100644 index 0000000..abb1f53 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_command_line_handling.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_console.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_console.cpython-38.pyc new file mode 100644 index 0000000..ffa7f60 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_console.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_constants.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_constants.cpython-38.pyc new file mode 100644 index 0000000..0313578 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_constants.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_custom_frames.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_custom_frames.cpython-38.pyc new file mode 100644 index 0000000..af7073e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_custom_frames.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_cython_wrapper.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_cython_wrapper.cpython-38.pyc new file mode 100644 index 0000000..9cec5b9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_cython_wrapper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_daemon_thread.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_daemon_thread.cpython-38.pyc new file mode 100644 index 0000000..383b9da Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_daemon_thread.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_defaults.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_defaults.cpython-38.pyc new file mode 100644 index 0000000..d972648 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_defaults.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace.cpython-38.pyc new file mode 100644 index 0000000..7300f87 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace_files.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace_files.cpython-38.pyc new file mode 100644 index 0000000..5bff4c9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_dont_trace_files.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_exec2.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_exec2.cpython-38.pyc new file mode 100644 index 0000000..c11edd8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_exec2.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_api.cpython-38.pyc new file mode 100644 index 0000000..aecde27 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_utils.cpython-38.pyc new file mode 100644 index 0000000..3497d97 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_extension_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_filtering.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_filtering.cpython-38.pyc new file mode 100644 index 0000000..777ea51 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_filtering.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame.cpython-38.pyc new file mode 100644 index 0000000..5ff243f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame_utils.cpython-38.pyc new file mode 100644 index 0000000..49bb235 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_frame_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_gevent_integration.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_gevent_integration.cpython-38.pyc new file mode 100644 index 0000000..6454fb0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_gevent_integration.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_import_class.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_import_class.cpython-38.pyc new file mode 100644 index 0000000..6ddf9c2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_import_class.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_io.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_io.cpython-38.pyc new file mode 100644 index 0000000..0a8b408 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_io.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_json_debug_options.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_json_debug_options.cpython-38.pyc new file mode 100644 index 0000000..b0993bc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_json_debug_options.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command.cpython-38.pyc new file mode 100644 index 0000000..9f791ac Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_json.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_json.cpython-38.pyc new file mode 100644 index 0000000..f74cdec Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_json.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_xml.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_xml.cpython-38.pyc new file mode 100644 index 0000000..374b4d6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_net_command_factory_xml.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_plugin_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_plugin_utils.cpython-38.pyc new file mode 100644 index 0000000..8bb9833 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_plugin_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command.cpython-38.pyc new file mode 100644 index 0000000..3436524 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command_json.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command_json.cpython-38.pyc new file mode 100644 index 0000000..f28cbab Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_process_net_command_json.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_referrers.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_referrers.cpython-38.pyc new file mode 100644 index 0000000..c611f7e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_referrers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_reload.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_reload.cpython-38.pyc new file mode 100644 index 0000000..9d8bcc4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_reload.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_resolver.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_resolver.cpython-38.pyc new file mode 100644 index 0000000..4c5f17e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_resolver.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_runpy.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_runpy.cpython-38.pyc new file mode 100644 index 0000000..9a50312 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_runpy.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_safe_repr.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_safe_repr.cpython-38.pyc new file mode 100644 index 0000000..fbdc173 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_safe_repr.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_save_locals.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_save_locals.cpython-38.pyc new file mode 100644 index 0000000..25575d0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_save_locals.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_signature.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_signature.cpython-38.pyc new file mode 100644 index 0000000..1d44541 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_signature.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_source_mapping.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_source_mapping.cpython-38.pyc new file mode 100644 index 0000000..9699d75 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_source_mapping.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_stackless.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_stackless.cpython-38.pyc new file mode 100644 index 0000000..7ed2b3a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_stackless.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_suspended_frames.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_suspended_frames.cpython-38.pyc new file mode 100644 index 0000000..aedf218 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_suspended_frames.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_thread_lifecycle.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_thread_lifecycle.cpython-38.pyc new file mode 100644 index 0000000..ddc4317 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_thread_lifecycle.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_timeout.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_timeout.cpython-38.pyc new file mode 100644 index 0000000..9a2f9a0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_timeout.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_api.cpython-38.pyc new file mode 100644 index 0000000..c2ec58d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch.cpython-38.pyc new file mode 100644 index 0000000..4a98a91 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch_regular.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch_regular.cpython-38.pyc new file mode 100644 index 0000000..ff3b761 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_trace_dispatch_regular.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_traceproperty.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_traceproperty.cpython-38.pyc new file mode 100644 index 0000000..1ea4405 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_traceproperty.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_utils.cpython-38.pyc new file mode 100644 index 0000000..544b7bc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vars.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vars.cpython-38.pyc new file mode 100644 index 0000000..3090082 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vars.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vm_type.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vm_type.cpython-38.pyc new file mode 100644 index 0000000..fed5068 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_vm_type.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_xml.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_xml.cpython-38.pyc new file mode 100644 index 0000000..c13ec6d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/__pycache__/pydevd_xml.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py new file mode 120000 index 0000000..5188225 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ee/b7/03/bcabf83b07876c9255508ad535aabb9d1cdcd8d9db442598ff0a867978 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a7c3542 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__main__pydevd_gen_debug_adapter_protocol.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__main__pydevd_gen_debug_adapter_protocol.cpython-38.pyc new file mode 100644 index 0000000..aac37ba Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/__main__pydevd_gen_debug_adapter_protocol.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_base_schema.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_base_schema.cpython-38.pyc new file mode 100644 index 0000000..01ebb58 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_base_schema.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema.cpython-38.pyc new file mode 100644 index 0000000..c2da410 Binary files 
/dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema_log.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema_log.cpython-38.pyc new file mode 100644 index 0000000..0ddb2fb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__pycache__/pydevd_schema_log.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json new file mode 100644 index 0000000..8bbe944 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json @@ -0,0 +1,4084 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Debug Adapter Protocol", + "description": "The Debug Adapter Protocol defines the protocol used between an editor or IDE and a debugger or runtime.", + "type": "object", + + + "definitions": { + + "ProtocolMessage": { + "type": "object", + "title": "Base Protocol", + "description": "Base class of requests, responses, and events.", + "properties": { + "seq": { + "type": "integer", + "description": "Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request." + }, + "type": { + "type": "string", + "description": "Message type.", + "_enum": [ "request", "response", "event" ] + } + }, + "required": [ "seq", "type" ] + }, + + "Request": { + "allOf": [ { "$ref": "#/definitions/ProtocolMessage" }, { + "type": "object", + "description": "A client or debug adapter initiated request.", + "properties": { + "type": { + "type": "string", + "enum": [ "request" ] + }, + "command": { + "type": "string", + "description": "The command to execute." + }, + "arguments": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Object containing arguments for the command." + } + }, + "required": [ "type", "command" ] + }] + }, + + "Event": { + "allOf": [ { "$ref": "#/definitions/ProtocolMessage" }, { + "type": "object", + "description": "A debug adapter initiated event.", + "properties": { + "type": { + "type": "string", + "enum": [ "event" ] + }, + "event": { + "type": "string", + "description": "Type of event." + }, + "body": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Event-specific information." + } + }, + "required": [ "type", "event" ] + }] + }, + + "Response": { + "allOf": [ { "$ref": "#/definitions/ProtocolMessage" }, { + "type": "object", + "description": "Response for a request.", + "properties": { + "type": { + "type": "string", + "enum": [ "response" ] + }, + "request_seq": { + "type": "integer", + "description": "Sequence number of the corresponding request." + }, + "success": { + "type": "boolean", + "description": "Outcome of the request.\nIf true, the request was successful and the 'body' attribute may contain the result of the request.\nIf the value is false, the attribute 'message' contains the error in short form and the 'body' may contain additional information (see 'ErrorResponse.body.error')." 
+ }, + "command": { + "type": "string", + "description": "The command requested." + }, + "message": { + "type": "string", + "description": "Contains the raw error in short form if 'success' is false.\nThis raw error might be interpreted by the frontend and is not shown in the UI.\nSome predefined values exist.", + "_enum": [ "cancelled" ], + "enumDescriptions": [ + "request was cancelled." + ] + }, + "body": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Contains request result if success is true and optional error details if success is false." + } + }, + "required": [ "type", "request_seq", "success", "command" ] + }] + }, + + "ErrorResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "On error (whenever 'success' is false), the body can provide more details.", + "properties": { + "body": { + "type": "object", + "properties": { + "error": { + "$ref": "#/definitions/Message", + "description": "An optional, structured error message." + } + } + } + }, + "required": [ "body" ] + }] + }, + + "CancelRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The 'cancel' request is used by the frontend in two situations:\n- to indicate that it is no longer interested in the result produced by a specific request issued earlier\n- to cancel a progress sequence. Clients should only call this request if the capability 'supportsCancelRequest' is true.\nThis request has a hint characteristic: a debug adapter can only be expected to make a 'best effort' in honouring this request but there are no guarantees.\nThe 'cancel' request may return an error if it could not cancel an operation but a frontend should refrain from presenting this error to end users.\nA frontend client should only call this request if the capability 'supportsCancelRequest' is true.\nThe request that got canceled still needs to send a response back. This can either be a normal result ('success' attribute true)\nor an error response ('success' attribute false and the 'message' set to 'cancelled').\nReturning partial results from a cancelled request is possible but please note that a frontend client has no generic way for detecting that a response is partial or not.\n The progress that got cancelled still needs to send a 'progressEnd' event back.\n A client should not assume that progress just got cancelled after sending the 'cancel' request.", + "properties": { + "command": { + "type": "string", + "enum": [ "cancel" ] + }, + "arguments": { + "$ref": "#/definitions/CancelArguments" + } + }, + "required": [ "command" ] + }] + }, + "CancelArguments": { + "type": "object", + "description": "Arguments for 'cancel' request.", + "properties": { + "requestId": { + "type": "integer", + "description": "The ID (attribute 'seq') of the request to cancel. If missing no request is cancelled.\nBoth a 'requestId' and a 'progressId' can be specified in one request." + }, + "progressId": { + "type": "string", + "description": "The ID (attribute 'progressId') of the progress to cancel. If missing no progress is cancelled.\nBoth a 'requestId' and a 'progressId' can be specified in one request." + } + } + }, + "CancelResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'cancel' request. This is just an acknowledgement, so no body field is required." 
+ }] + }, + + "InitializedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "title": "Events", + "description": "This event indicates that the debug adapter is ready to accept configuration requests (e.g. SetBreakpointsRequest, SetExceptionBreakpointsRequest).\nA debug adapter is expected to send this event when it is ready to accept configuration requests (but not before the 'initialize' request has finished).\nThe sequence of events/requests is as follows:\n- adapters sends 'initialized' event (after the 'initialize' request has returned)\n- frontend sends zero or more 'setBreakpoints' requests\n- frontend sends one 'setFunctionBreakpoints' request (if capability 'supportsFunctionBreakpoints' is true)\n- frontend sends a 'setExceptionBreakpoints' request if one or more 'exceptionBreakpointFilters' have been defined (or if 'supportsConfigurationDoneRequest' is not defined or false)\n- frontend sends other future configuration requests\n- frontend sends one 'configurationDone' request to indicate the end of the configuration.", + "properties": { + "event": { + "type": "string", + "enum": [ "initialized" ] + } + }, + "required": [ "event" ] + }] + }, + + "StoppedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that the execution of the debuggee has stopped due to some condition.\nThis can be caused by a break point previously set, a stepping request has completed, by executing a debugger statement etc.", + "properties": { + "event": { + "type": "string", + "enum": [ "stopped" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.\nFor backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated).", + "_enum": [ "step", "breakpoint", "exception", "pause", "entry", "goto", "function breakpoint", "data breakpoint", "instruction breakpoint" ] + }, + "description": { + "type": "string", + "description": "The full reason for the event, e.g. 'Paused on exception'. This string is shown in the UI as is and must be translated." + }, + "threadId": { + "type": "integer", + "description": "The thread which was stopped." + }, + "preserveFocusHint": { + "type": "boolean", + "description": "A value of true hints to the frontend that this event should not change the focus." + }, + "text": { + "type": "string", + "description": "Additional information. E.g. if reason is 'exception', text contains the exception name. This string is shown in the UI." + }, + "allThreadsStopped": { + "type": "boolean", + "description": "If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped.\n- The client should use this information to enable that all threads can be expanded to access their stacktraces.\n- If the attribute is missing or false, only the thread with the given threadId can be expanded." + }, + "hitBreakpointIds": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Ids of the breakpoints that triggered the event. In most cases there will be only a single breakpoint but here are some examples for multiple breakpoints:\n- Different types of breakpoints map to the same location.\n- Multiple source breakpoints get collapsed to the same instruction by the compiler/runtime.\n- Multiple function breakpoints with different function names map to the same location." 
+ } + }, + "required": [ "reason" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ContinuedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that the execution of the debuggee has continued.\nPlease note: a debug adapter is not expected to send this event in response to a request that implies that execution continues, e.g. 'launch' or 'continue'.\nIt is only necessary to send a 'continued' event if there was no previous request that implied this.", + "properties": { + "event": { + "type": "string", + "enum": [ "continued" ] + }, + "body": { + "type": "object", + "properties": { + "threadId": { + "type": "integer", + "description": "The thread which was continued." + }, + "allThreadsContinued": { + "type": "boolean", + "description": "If 'allThreadsContinued' is true, a debug adapter can announce that all threads have continued." + } + }, + "required": [ "threadId" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ExitedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that the debuggee has exited and returns its exit code.", + "properties": { + "event": { + "type": "string", + "enum": [ "exited" ] + }, + "body": { + "type": "object", + "properties": { + "exitCode": { + "type": "integer", + "description": "The exit code returned from the debuggee." + } + }, + "required": [ "exitCode" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "TerminatedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that debugging of the debuggee has terminated. This does **not** mean that the debuggee itself has exited.", + "properties": { + "event": { + "type": "string", + "enum": [ "terminated" ] + }, + "body": { + "type": "object", + "properties": { + "restart": { + "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], + "description": "A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session.\nThe value is not interpreted by the client and passed unmodified as an attribute '__restart' to the 'launch' and 'attach' requests." + } + } + } + }, + "required": [ "event" ] + }] + }, + + "ThreadEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that a thread has started or exited.", + "properties": { + "event": { + "type": "string", + "enum": [ "thread" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "_enum": [ "started", "exited" ] + }, + "threadId": { + "type": "integer", + "description": "The identifier of the thread." + } + }, + "required": ["reason", "threadId"] + } + }, + "required": [ "event", "body" ] + }] + }, + + "OutputEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that the target has produced some output.", + "properties": { + "event": { + "type": "string", + "enum": [ "output" ] + }, + "body": { + "type": "object", + "properties": { + "category": { + "type": "string", + "description": "The output category. If not specified or if the category is not understand by the client, 'console' is assumed.", + "_enum": [ "console", "important", "stdout", "stderr", "telemetry" ], + "enumDescriptions": [ + "Show the output in the client's default message UI, e.g. 
a 'debug console'. This category should only be used for informational output from the debugger (as opposed to the debuggee).", + "A hint for the client to show the output in the client's UI for important and highly visible information, e.g. as a popup notification. This category should only be used for important messages from the debugger (as opposed to the debuggee). Since this category value is a hint, clients might ignore the hint and assume the 'console' category.", + "Show the output as normal program output from the debuggee.", + "Show the output as error program output from the debuggee.", + "Send the output to telemetry instead of showing it to the user." + ] + }, + "output": { + "type": "string", + "description": "The output to report." + }, + "group": { + "type": "string", + "description": "Support for keeping an output log organized by grouping related messages.", + "enum": [ "start", "startCollapsed", "end" ], + "enumDescriptions": [ + "Start a new group in expanded mode. Subsequent output events are members of the group and should be shown indented.\nThe 'output' attribute becomes the name of the group and is not indented.", + "Start a new group in collapsed mode. Subsequent output events are members of the group and should be shown indented (as soon as the group is expanded).\nThe 'output' attribute becomes the name of the group and is not indented.", + "End the current group and decrease the indentation of subsequent output events.\nA non empty 'output' attribute is shown as the unindented end of the group." + ] + }, + "variablesReference": { + "type": "integer", + "description": "If an attribute 'variablesReference' exists and its value is > 0, the output contains objects which can be retrieved by passing 'variablesReference' to the 'variables' request. The value should be less than or equal to 2147483647 (2^31-1)." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "An optional source location where the output was produced." + }, + "line": { + "type": "integer", + "description": "An optional source location line where the output was produced." + }, + "column": { + "type": "integer", + "description": "An optional source location column where the output was produced." + }, + "data": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Optional data to report. For the 'telemetry' category the data will be sent to telemetry, for the other categories the data is shown in JSON format." + } + }, + "required": ["output"] + } + }, + "required": [ "event", "body" ] + }] + }, + + "BreakpointEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that some information about a breakpoint has changed.", + "properties": { + "event": { + "type": "string", + "enum": [ "breakpoint" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "_enum": [ "changed", "new", "removed" ] + }, + "breakpoint": { + "$ref": "#/definitions/Breakpoint", + "description": "The 'id' attribute is used to find the target breakpoint and the other attributes are used as the new values."
+ } + }, + "required": [ "reason", "breakpoint" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ModuleEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that some information about a module has changed.", + "properties": { + "event": { + "type": "string", + "enum": [ "module" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "enum": [ "new", "changed", "removed" ] + }, + "module": { + "$ref": "#/definitions/Module", + "description": "The new, changed, or removed module. In case of 'removed' only the module id is used." + } + }, + "required": [ "reason", "module" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "LoadedSourceEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that some source has been added, changed, or removed from the set of all loaded sources.", + "properties": { + "event": { + "type": "string", + "enum": [ "loadedSource" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "enum": [ "new", "changed", "removed" ] + }, + "source": { + "$ref": "#/definitions/Source", + "description": "The new, changed, or removed source." + } + }, + "required": [ "reason", "source" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ProcessEvent": { + "allOf": [ + { "$ref": "#/definitions/Event" }, + { + "type": "object", + "description": "The event indicates that the debugger has begun debugging a new process. Either one that it has launched, or one that it has attached to.", + "properties": { + "event": { + "type": "string", + "enum": [ "process" ] + }, + "body": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The logical name of the process. This is usually the full path to process's executable file. Example: /home/example/myproj/program.js." + }, + "systemProcessId": { + "type": "integer", + "description": "The system process id of the debugged process. This property will be missing for non-system processes." + }, + "isLocalProcess": { + "type": "boolean", + "description": "If true, the process is running on the same computer as the debug adapter." + }, + "startMethod": { + "type": "string", + "enum": [ "launch", "attach", "attachForSuspendedLaunch" ], + "description": "Describes how the debug engine started debugging this process.", + "enumDescriptions": [ + "Process was launched under the debugger.", + "Debugger attached to an existing process.", + "A project launcher component has launched a new process in a suspended state and then asked the debugger to attach." + ] + }, + "pointerSize": { + "type": "integer", + "description": "The size of a pointer or address for this process, in bits. This value may be used by clients when formatting addresses for display." 
+ } + }, + "required": [ "name" ] + } + }, + "required": [ "event", "body" ] + } + ] + }, + + "CapabilitiesEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event indicates that one or more capabilities have changed.\nSince the capabilities are dependent on the frontend and its UI, it might not be possible to change that at random times (or too late).\nConsequently this event has a hint characteristic: a frontend can only be expected to make a 'best effort' in honouring individual capabilities but there are no guarantees.\nOnly changed capabilities need to be included, all other capabilities keep their values.", + "properties": { + "event": { + "type": "string", + "enum": [ "capabilities" ] + }, + "body": { + "type": "object", + "properties": { + "capabilities": { + "$ref": "#/definitions/Capabilities", + "description": "The set of updated capabilities." + } + }, + "required": [ "capabilities" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ProgressStartEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event signals that a long running operation is about to start and\nprovides additional information for the client to set up a corresponding progress and cancellation UI.\nThe client is free to delay the showing of the UI in order to reduce flicker.\nThis event should only be sent if the client has passed the value true for the 'supportsProgressReporting' capability of the 'initialize' request.", + "properties": { + "event": { + "type": "string", + "enum": [ "progressStart" ] + }, + "body": { + "type": "object", + "properties": { + "progressId": { + "type": "string", + "description": "An ID that must be used in subsequent 'progressUpdate' and 'progressEnd' events to make them refer to the same progress reporting.\nIDs must be unique within a debug session." + }, + "title": { + "type": "string", + "description": "Mandatory (short) title of the progress reporting. Shown in the UI to describe the long running operation." + }, + "requestId": { + "type": "integer", + "description": "The request ID that this progress report is related to. If specified a debug adapter is expected to emit\nprogress events for the long running request until the request has been either completed or cancelled.\nIf the request ID is omitted, the progress report is assumed to be related to some general activity of the debug adapter." + }, + "cancellable": { + "type": "boolean", + "description": "If true, the request that reports progress may be canceled with a 'cancel' request.\nSo this property basically controls whether the client should use UX that supports cancellation.\nClients that don't support cancellation are allowed to ignore the setting." + }, + "message": { + "type": "string", + "description": "Optional, more detailed progress message." + }, + "percentage": { + "type": "number", + "description": "Optional progress percentage to display (value range: 0 to 100). If omitted no percentage will be shown." 
+ } + }, + "required": [ "progressId", "title" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ProgressUpdateEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event signals that the progress reporting needs to updated with a new message and/or percentage.\nThe client does not have to update the UI immediately, but the clients needs to keep track of the message and/or percentage values.\nThis event should only be sent if the client has passed the value true for the 'supportsProgressReporting' capability of the 'initialize' request.", + "properties": { + "event": { + "type": "string", + "enum": [ "progressUpdate" ] + }, + "body": { + "type": "object", + "properties": { + "progressId": { + "type": "string", + "description": "The ID that was introduced in the initial 'progressStart' event." + }, + "message": { + "type": "string", + "description": "Optional, more detailed progress message. If omitted, the previous message (if any) is used." + }, + "percentage": { + "type": "number", + "description": "Optional progress percentage to display (value range: 0 to 100). If omitted no percentage will be shown." + } + }, + "required": [ "progressId" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ProgressEndEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "The event signals the end of the progress reporting with an optional final message.\nThis event should only be sent if the client has passed the value true for the 'supportsProgressReporting' capability of the 'initialize' request.", + "properties": { + "event": { + "type": "string", + "enum": [ "progressEnd" ] + }, + "body": { + "type": "object", + "properties": { + "progressId": { + "type": "string", + "description": "The ID that was introduced in the initial 'ProgressStartEvent'." + }, + "message": { + "type": "string", + "description": "Optional, more detailed progress message. If omitted, the previous message (if any) is used." + } + }, + "required": [ "progressId" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "InvalidatedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "This event signals that some state in the debug adapter has changed and requires that the client needs to re-render the data snapshot previously requested.\nDebug adapters do not have to emit this event for runtime changes like stopped or thread events because in that case the client refetches the new state anyway. But the event can be used for example to refresh the UI after rendering formatting has changed in the debug adapter.\nThis event should only be sent if the debug adapter has received a value true for the 'supportsInvalidatedEvent' capability of the 'initialize' request.", + "properties": { + "event": { + "type": "string", + "enum": [ "invalidated" ] + }, + "body": { + "type": "object", + "properties": { + "areas": { + "type": "array", + "description": "Optional set of logical areas that got invalidated. This property has a hint characteristic: a client can only be expected to make a 'best effort' in honouring the areas but there are no guarantees. If this property is missing, empty, or if values are not understand the client should assume a single value 'all'.", + "items": { + "$ref": "#/definitions/InvalidatedAreas" + } + }, + "threadId": { + "type": "integer", + "description": "If specified, the client only needs to refetch data related to this thread." 
+ }, + "stackFrameId": { + "type": "integer", + "description": "If specified, the client only needs to refetch data related to this stack frame (and the 'threadId' is ignored)." + } + } + } + }, + "required": [ "event", "body" ] + }] + }, + + "MemoryEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "This event indicates that some memory range has been updated. It should only be sent if the debug adapter has received a value true for the `supportsMemoryEvent` capability of the `initialize` request.\nClients typically react to the event by re-issuing a `readMemory` request if they show the memory identified by the `memoryReference` and if the updated memory range overlaps the displayed range. Clients should not make assumptions how individual memory references relate to each other, so they should not assume that they are part of a single continuous address range and might overlap.\nDebug adapters can use this event to indicate that the contents of a memory range has changed due to some other DAP request like `setVariable` or `setExpression`. Debug adapters are not expected to emit this event for each and every memory change of a running program, because that information is typically not available from debuggers and it would flood clients with too many events.", + "properties": { + "event": { + "type": "string", + "enum": [ "memory" ] + }, + "body": { + "type": "object", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference of a memory range that has been updated." + }, + "offset": { + "type": "integer", + "description": "Starting offset in bytes where memory has been updated. Can be negative." + }, + "count": { + "type": "integer", + "description": "Number of bytes updated." + } + }, + "required": [ "memoryReference", "offset", "count" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "RunInTerminalRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "title": "Reverse Requests", + "description": "This optional request is sent from the debug adapter to the client to run a command in a terminal.\nThis is typically used to launch the debuggee in a terminal provided by the client.\nThis request should only be called if the client has passed the value true for the 'supportsRunInTerminalRequest' capability of the 'initialize' request.", + "properties": { + "command": { + "type": "string", + "enum": [ "runInTerminal" ] + }, + "arguments": { + "$ref": "#/definitions/RunInTerminalRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "RunInTerminalRequestArguments": { + "type": "object", + "description": "Arguments for 'runInTerminal' request.", + "properties": { + "kind": { + "type": "string", + "enum": [ "integrated", "external" ], + "description": "What kind of terminal to launch." + }, + "title": { + "type": "string", + "description": "Optional title of the terminal." + }, + "cwd": { + "type": "string", + "description": "Working directory for the command. For non-empty, valid paths this typically results in execution of a change directory command." + }, + "args": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of arguments. The first argument is the command to run." 
+ }, + "env": { + "type": "object", + "description": "Environment key-value pairs that are added to or removed from the default environment.", + "additionalProperties": { + "type": [ "string", "null" ], + "description": "Proper values must be strings. A value of 'null' removes the variable from the environment." + } + } + }, + "required": [ "args", "cwd" ] + }, + "RunInTerminalResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'runInTerminal' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "processId": { + "type": "integer", + "description": "The process ID. The value should be less than or equal to 2147483647 (2^31-1)." + }, + "shellProcessId": { + "type": "integer", + "description": "The process ID of the terminal shell. The value should be less than or equal to 2147483647 (2^31-1)." + } + } + } + }, + "required": [ "body" ] + }] + }, + + "InitializeRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "title": "Requests", + "description": "The 'initialize' request is sent as the first request from the client to the debug adapter\nin order to configure it with client capabilities and to retrieve capabilities from the debug adapter.\nUntil the debug adapter has responded to with an 'initialize' response, the client must not send any additional requests or events to the debug adapter.\nIn addition the debug adapter is not allowed to send any requests or events to the client until it has responded with an 'initialize' response.\nThe 'initialize' request may only be sent once.", + "properties": { + "command": { + "type": "string", + "enum": [ "initialize" ] + }, + "arguments": { + "$ref": "#/definitions/InitializeRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "InitializeRequestArguments": { + "type": "object", + "description": "Arguments for 'initialize' request.", + "properties": { + "clientID": { + "type": "string", + "description": "The ID of the (frontend) client using this adapter." + }, + "clientName": { + "type": "string", + "description": "The human readable name of the (frontend) client using this adapter." + }, + "adapterID": { + "type": "string", + "description": "The ID of the debug adapter." + }, + "locale": { + "type": "string", + "description": "The ISO-639 locale of the (frontend) client using this adapter, e.g. en-US or de-CH." + }, + "linesStartAt1": { + "type": "boolean", + "description": "If true all line numbers are 1-based (default)." + }, + "columnsStartAt1": { + "type": "boolean", + "description": "If true all column numbers are 1-based (default)." + }, + "pathFormat": { + "type": "string", + "_enum": [ "path", "uri" ], + "description": "Determines in what format paths are specified. The default is 'path', which is the native format." + }, + "supportsVariableType": { + "type": "boolean", + "description": "Client supports the optional type attribute for variables." + }, + "supportsVariablePaging": { + "type": "boolean", + "description": "Client supports the paging of variables." + }, + "supportsRunInTerminalRequest": { + "type": "boolean", + "description": "Client supports the runInTerminal request." + }, + "supportsMemoryReferences": { + "type": "boolean", + "description": "Client supports memory references." + }, + "supportsProgressReporting": { + "type": "boolean", + "description": "Client supports progress reporting." 
+ }, + "supportsInvalidatedEvent": { + "type": "boolean", + "description": "Client supports the invalidated event." + }, + "supportsMemoryEvent": { + "type": "boolean", + "description": "Client supports the memory event." + } + }, + "required": [ "adapterID" ] + }, + "InitializeResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'initialize' request.", + "properties": { + "body": { + "$ref": "#/definitions/Capabilities", + "description": "The capabilities of this debug adapter." + } + } + }] + }, + + "ConfigurationDoneRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "This optional request indicates that the client has finished initialization of the debug adapter.\nSo it is the last request in the sequence of configuration requests (which was started by the 'initialized' event).\nClients should only call this request if the capability 'supportsConfigurationDoneRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "configurationDone" ] + }, + "arguments": { + "$ref": "#/definitions/ConfigurationDoneArguments" + } + }, + "required": [ "command" ] + }] + }, + "ConfigurationDoneArguments": { + "type": "object", + "description": "Arguments for 'configurationDone' request." + }, + "ConfigurationDoneResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'configurationDone' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "LaunchRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "This launch request is sent from the client to the debug adapter to start the debuggee with or without debugging (if 'noDebug' is true).\nSince launching is debugger/runtime specific, the arguments for this request are not part of this specification.", + "properties": { + "command": { + "type": "string", + "enum": [ "launch" ] + }, + "arguments": { + "$ref": "#/definitions/LaunchRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "LaunchRequestArguments": { + "type": "object", + "description": "Arguments for 'launch' request. Additional attributes are implementation specific.", + "properties": { + "noDebug": { + "type": "boolean", + "description": "If noDebug is true the launch request should launch the program without enabling debugging." + }, + "__restart": { + "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], + "description": "Optional data from the previous, restarted session.\nThe data is sent as the 'restart' attribute of the 'terminated' event.\nThe client should leave the data intact." + } + } + }, + "LaunchResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'launch' request. This is just an acknowledgement, so no body field is required." 
+ }] + }, + + "AttachRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The attach request is sent from the client to the debug adapter to attach to a debuggee that is already running.\nSince attaching is debugger/runtime specific, the arguments for this request are not part of this specification.", + "properties": { + "command": { + "type": "string", + "enum": [ "attach" ] + }, + "arguments": { + "$ref": "#/definitions/AttachRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "AttachRequestArguments": { + "type": "object", + "description": "Arguments for 'attach' request. Additional attributes are implementation specific.", + "properties": { + "__restart": { + "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], + "description": "Optional data from the previous, restarted session.\nThe data is sent as the 'restart' attribute of the 'terminated' event.\nThe client should leave the data intact." + } + } + }, + "AttachResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'attach' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "RestartRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Restarts a debug session. Clients should only call this request if the capability 'supportsRestartRequest' is true.\nIf the capability is missing or has the value false, a typical client will emulate 'restart' by terminating the debug adapter first and then launching it anew.", + "properties": { + "command": { + "type": "string", + "enum": [ "restart" ] + }, + "arguments": { + "$ref": "#/definitions/RestartArguments" + } + }, + "required": [ "command" ] + }] + }, + "RestartArguments": { + "type": "object", + "description": "Arguments for 'restart' request.", + "properties": { + "arguments": { + "oneOf": [ + { "$ref": "#/definitions/LaunchRequestArguments" }, + { "$ref": "#/definitions/AttachRequestArguments" } + ], + "description": "The latest version of the 'launch' or 'attach' configuration." + } + } + }, + "RestartResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'restart' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "DisconnectRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The 'disconnect' request is sent from the client to the debug adapter in order to stop debugging.\nIt asks the debug adapter to disconnect from the debuggee and to terminate the debug adapter.\nIf the debuggee has been started with the 'launch' request, the 'disconnect' request terminates the debuggee.\nIf the 'attach' request was used to connect to the debuggee, 'disconnect' does not terminate the debuggee.\nThis behavior can be controlled with the 'terminateDebuggee' argument (if supported by the debug adapter).", + "properties": { + "command": { + "type": "string", + "enum": [ "disconnect" ] + }, + "arguments": { + "$ref": "#/definitions/DisconnectArguments" + } + }, + "required": [ "command" ] + }] + }, + "DisconnectArguments": { + "type": "object", + "description": "Arguments for 'disconnect' request.", + "properties": { + "restart": { + "type": "boolean", + "description": "A value of true indicates that this 'disconnect' request is part of a restart sequence." 
+ }, + "terminateDebuggee": { + "type": "boolean", + "description": "Indicates whether the debuggee should be terminated when the debugger is disconnected.\nIf unspecified, the debug adapter is free to do whatever it thinks is best.\nThe attribute is only honored by a debug adapter if the capability 'supportTerminateDebuggee' is true." + }, + "suspendDebuggee": { + "type": "boolean", + "description": "Indicates whether the debuggee should stay suspended when the debugger is disconnected.\nIf unspecified, the debuggee should resume execution.\nThe attribute is only honored by a debug adapter if the capability 'supportSuspendDebuggee' is true." + } + } + }, + "DisconnectResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'disconnect' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "TerminateRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The 'terminate' request is sent from the client to the debug adapter in order to give the debuggee a chance for terminating itself.\nClients should only call this request if the capability 'supportsTerminateRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "terminate" ] + }, + "arguments": { + "$ref": "#/definitions/TerminateArguments" + } + }, + "required": [ "command" ] + }] + }, + "TerminateArguments": { + "type": "object", + "description": "Arguments for 'terminate' request.", + "properties": { + "restart": { + "type": "boolean", + "description": "A value of true indicates that this 'terminate' request is part of a restart sequence." + } + } + }, + "TerminateResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'terminate' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "BreakpointLocationsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The 'breakpointLocations' request returns all possible locations for source breakpoints in a given range.\nClients should only call this request if the capability 'supportsBreakpointLocationsRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "breakpointLocations" ] + }, + "arguments": { + "$ref": "#/definitions/BreakpointLocationsArguments" + } + }, + "required": [ "command" ] + }] + + }, + "BreakpointLocationsArguments": { + "type": "object", + "description": "Arguments for 'breakpointLocations' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "The source location of the breakpoints; either 'source.path' or 'source.reference' must be specified." + }, + "line": { + "type": "integer", + "description": "Start line of range to search possible breakpoint locations in. If only the line is specified, the request returns all possible locations in that line." + }, + "column": { + "type": "integer", + "description": "Optional start column of range to search possible breakpoint locations in. If no start column is given, the first column in the start line is assumed." + }, + "endLine": { + "type": "integer", + "description": "Optional end line of range to search possible breakpoint locations in. If no end line is given, then the end line is assumed to be the start line." + }, + "endColumn": { + "type": "integer", + "description": "Optional end column of range to search possible breakpoint locations in. 
If no end column is given, then it is assumed to be in the last column of the end line." + } + }, + "required": [ "source", "line" ] + }, + "BreakpointLocationsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'breakpointLocations' request.\nContains possible locations for source breakpoints.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/BreakpointLocation" + }, + "description": "Sorted set of possible breakpoint locations." + } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Sets multiple breakpoints for a single source and clears all previous breakpoints in that source.\nTo clear all breakpoint for a source, specify an empty array.\nWhen a breakpoint is hit, a 'stopped' event (with reason 'breakpoint') is generated.", + "properties": { + "command": { + "type": "string", + "enum": [ "setBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setBreakpoints' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "The source location of the breakpoints; either 'source.path' or 'source.reference' must be specified." + }, + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/SourceBreakpoint" + }, + "description": "The code locations of the breakpoints." + }, + "lines": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Deprecated: The code locations of the breakpoints." + }, + "sourceModified": { + "type": "boolean", + "description": "A value of true indicates that the underlying source has been modified which results in new breakpoint locations." + } + }, + "required": [ "source" ] + }, + "SetBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setBreakpoints' request.\nReturned is information about each breakpoint created by this request.\nThis includes the actual code location and whether the breakpoint could be verified.\nThe breakpoints returned are in the same order as the elements of the 'breakpoints'\n(or the deprecated 'lines') array in the arguments.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the breakpoints.\nThe array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments." 
+ } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetFunctionBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Replaces all existing function breakpoints with new function breakpoints.\nTo clear all function breakpoints, specify an empty array.\nWhen a function breakpoint is hit, a 'stopped' event (with reason 'function breakpoint') is generated.\nClients should only call this request if the capability 'supportsFunctionBreakpoints' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "setFunctionBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetFunctionBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetFunctionBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setFunctionBreakpoints' request.", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/FunctionBreakpoint" + }, + "description": "The function names of the breakpoints." + } + }, + "required": [ "breakpoints" ] + }, + "SetFunctionBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setFunctionBreakpoints' request.\nReturned is information about each breakpoint created by this request.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the breakpoints. The array elements correspond to the elements of the 'breakpoints' array." + } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetExceptionBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request configures the debuggers response to thrown exceptions.\nIf an exception is configured to break, a 'stopped' event is fired (with reason 'exception').\nClients should only call this request if the capability 'exceptionBreakpointFilters' returns one or more filters.", + "properties": { + "command": { + "type": "string", + "enum": [ "setExceptionBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetExceptionBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetExceptionBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setExceptionBreakpoints' request.", + "properties": { + "filters": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Set of exception filters specified by their ID. The set of all possible exception filters is defined by the 'exceptionBreakpointFilters' capability. The 'filter' and 'filterOptions' sets are additive." + }, + "filterOptions": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionFilterOptions" + }, + "description": "Set of exception filters and their options. The set of all possible exception filters is defined by the 'exceptionBreakpointFilters' capability. This attribute is only honored by a debug adapter if the capability 'supportsExceptionFilterOptions' is true. The 'filter' and 'filterOptions' sets are additive." + }, + "exceptionOptions": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionOptions" + }, + "description": "Configuration options for selected exceptions.\nThe attribute is only honored by a debug adapter if the capability 'supportsExceptionOptions' is true." 
+ } + }, + "required": [ "filters" ] + }, + "SetExceptionBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setExceptionBreakpoints' request.\nThe response contains an array of Breakpoint objects with information about each exception breakpoint or filter. The Breakpoint objects are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays given as arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information.\nThe mandatory 'verified' property of a Breakpoint object signals whether the exception breakpoint or filter could be successfully created and whether the optional condition or hit count expressions are valid. In case of an error the 'message' property explains the problem. An optional 'id' property can be used to introduce a unique ID for the exception breakpoint or filter so that it can be updated subsequently by sending breakpoint events.\nFor backward compatibility both the 'breakpoints' array and the enclosing 'body' are optional. If these elements are missing a client will not be able to show problems for individual exception breakpoints or filters.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the exception breakpoints or filters.\nThe breakpoints returned are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays in the arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information." + } + } + } + } + }] + }, + + "DataBreakpointInfoRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Obtains information on a possible data breakpoint that could be set on an expression or variable.\nClients should only call this request if the capability 'supportsDataBreakpoints' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "dataBreakpointInfo" ] + }, + "arguments": { + "$ref": "#/definitions/DataBreakpointInfoArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "DataBreakpointInfoArguments": { + "type": "object", + "description": "Arguments for 'dataBreakpointInfo' request.", + "properties": { + "variablesReference": { + "type": "integer", + "description": "Reference to the Variable container if the data breakpoint is requested for a child of the container." + }, + "name": { + "type": "string", + "description": "The name of the Variable's child to obtain data breakpoint information for.\nIf variablesReference isn't provided, this can be an expression." + } + }, + "required": [ "name" ] + }, + "DataBreakpointInfoResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'dataBreakpointInfo' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "dataId": { + "type": [ "string", "null" ], + "description": "An identifier for the data on which a data breakpoint can be registered with the setDataBreakpoints request or null if no data breakpoint is available." 
+ }, + "description": { + "type": "string", + "description": "UI string that describes on what data the breakpoint is set on or why a data breakpoint is not available." + }, + "accessTypes": { + "type": "array", + "items": { + "$ref": "#/definitions/DataBreakpointAccessType" + }, + "description": "Optional attribute listing the available access types for a potential data breakpoint. A UI frontend could surface this information." + }, + "canPersist": { + "type": "boolean", + "description": "Optional attribute indicating that a potential data breakpoint could be persisted across sessions." + } + }, + "required": [ "dataId", "description" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetDataBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Replaces all existing data breakpoints with new data breakpoints.\nTo clear all data breakpoints, specify an empty array.\nWhen a data breakpoint is hit, a 'stopped' event (with reason 'data breakpoint') is generated.\nClients should only call this request if the capability 'supportsDataBreakpoints' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "setDataBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetDataBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetDataBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setDataBreakpoints' request.", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/DataBreakpoint" + }, + "description": "The contents of this array replaces all existing data breakpoints. An empty array clears all data breakpoints." + } + }, + "required": [ "breakpoints" ] + }, + "SetDataBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setDataBreakpoints' request.\nReturned is information about each breakpoint created by this request.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the data breakpoints. The array elements correspond to the elements of the input argument 'breakpoints' array." + } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetInstructionBreakpointsRequest": { + "allOf": [ + { "$ref": "#/definitions/Request" }, + { + "type": "object", + "description": "Replaces all existing instruction breakpoints. Typically, instruction breakpoints would be set from a diassembly window. 
\nTo clear all instruction breakpoints, specify an empty array.\nWhen an instruction breakpoint is hit, a 'stopped' event (with reason 'instruction breakpoint') is generated.\nClients should only call this request if the capability 'supportsInstructionBreakpoints' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "setInstructionBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetInstructionBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetInstructionBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setInstructionBreakpoints' request", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/InstructionBreakpoint" + }, + "description": "The instruction references of the breakpoints" + } + }, + "required": ["breakpoints"] + }, + "SetInstructionBreakpointsResponse": { + "allOf": [ + { "$ref": "#/definitions/Response" }, + { + "type": "object", + "description": "Response to 'setInstructionBreakpoints' request", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the breakpoints. The array elements correspond to the elements of the 'breakpoints' array." + } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "ContinueRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request resumes execution of all threads. If the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true resumes only the specified thread. If not all threads were resumed, the 'allThreadsContinued' attribute of the response must be set to false.", + "properties": { + "command": { + "type": "string", + "enum": [ "continue" ] + }, + "arguments": { + "$ref": "#/definitions/ContinueArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ContinueArguments": { + "type": "object", + "description": "Arguments for 'continue' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, execution is resumed only for the thread with given 'threadId'." + } + }, + "required": [ "threadId" ] + }, + "ContinueResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'continue' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "allThreadsContinued": { + "type": "boolean", + "description": "The value true (or a missing property) signals to the client that all threads have been resumed. The value false must be returned if not all threads were resumed." 
+ } + } + } + }, + "required": [ "body" ] + }] + }, + + "NextRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request executes one step (in the given granularity) for the specified thread and allows all other threads to run freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.", + "properties": { + "command": { + "type": "string", + "enum": [ "next" ] + }, + "arguments": { + "$ref": "#/definitions/NextArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "NextArguments": { + "type": "object", + "description": "Arguments for 'next' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Specifies the thread for which to resume execution for one step (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." + }, + "granularity": { + "$ref": "#/definitions/SteppingGranularity", + "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed." + } + }, + "required": [ "threadId" ] + }, + "NextResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'next' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StepInRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request resumes the given thread to step into a function/method and allows all other threads to run freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nIf the request cannot step into a target, 'stepIn' behaves like the 'next' request.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.\nIf there are multiple function/method calls (or other targets) on the source line,\nthe optional argument 'targetId' can be used to control into which target the 'stepIn' should occur.\nThe list of possible targets for a given source line can be retrieved via the 'stepInTargets' request.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepIn" ] + }, + "arguments": { + "$ref": "#/definitions/StepInArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepInArguments": { + "type": "object", + "description": "Arguments for 'stepIn' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Specifies the thread for which to resume execution for one step-into (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." + }, + "targetId": { + "type": "integer", + "description": "Optional id of the target to step into." + }, + "granularity": { + "$ref": "#/definitions/SteppingGranularity", + "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed." 
+ } + }, + "required": [ "threadId" ] + }, + "StepInResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepIn' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StepOutRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request resumes the given thread to step out (return) from a function/method and allows all other threads to run freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepOut" ] + }, + "arguments": { + "$ref": "#/definitions/StepOutArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepOutArguments": { + "type": "object", + "description": "Arguments for 'stepOut' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Specifies the thread for which to resume execution for one step-out (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." + }, + "granularity": { + "$ref": "#/definitions/SteppingGranularity", + "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed." + } + }, + "required": [ "threadId" ] + }, + "StepOutResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepOut' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StepBackRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request executes one backward step (in the given granularity) for the specified thread and allows all other threads to run backward freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.\nClients should only call this request if the capability 'supportsStepBack' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepBack" ] + }, + "arguments": { + "$ref": "#/definitions/StepBackArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepBackArguments": { + "type": "object", + "description": "Arguments for 'stepBack' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Specifies the thread for which to resume execution for one step backwards (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." + }, + "granularity": { + "$ref": "#/definitions/SteppingGranularity", + "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed." 
+ } + }, + "required": [ "threadId" ] + }, + "StepBackResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepBack' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "ReverseContinueRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request resumes backward execution of all threads. If the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true resumes only the specified thread. If not all threads were resumed, the 'allThreadsContinued' attribute of the response must be set to false.\nClients should only call this request if the capability 'supportsStepBack' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "reverseContinue" ] + }, + "arguments": { + "$ref": "#/definitions/ReverseContinueArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ReverseContinueArguments": { + "type": "object", + "description": "Arguments for 'reverseContinue' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, backward execution is resumed only for the thread with given 'threadId'." + } + + }, + "required": [ "threadId" ] + }, + "ReverseContinueResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'reverseContinue' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "RestartFrameRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request restarts execution of the specified stackframe.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'restart') after the restart has completed.\nClients should only call this request if the capability 'supportsRestartFrame' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "restartFrame" ] + }, + "arguments": { + "$ref": "#/definitions/RestartFrameArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "RestartFrameArguments": { + "type": "object", + "description": "Arguments for 'restartFrame' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "Restart this stackframe." + } + }, + "required": [ "frameId" ] + }, + "RestartFrameResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'restartFrame' request. This is just an acknowledgement, so no body field is required." 
+ }] + }, + + "GotoRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request sets the location where the debuggee will continue to run.\nThis makes it possible to skip the execution of code or to executed code again.\nThe code between the current location and the goto target is not executed but skipped.\nThe debug adapter first sends the response and then a 'stopped' event with reason 'goto'.\nClients should only call this request if the capability 'supportsGotoTargetsRequest' is true (because only then goto targets exist that can be passed as arguments).", + "properties": { + "command": { + "type": "string", + "enum": [ "goto" ] + }, + "arguments": { + "$ref": "#/definitions/GotoArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "GotoArguments": { + "type": "object", + "description": "Arguments for 'goto' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Set the goto target for this thread." + }, + "targetId": { + "type": "integer", + "description": "The location where the debuggee will continue to run." + } + }, + "required": [ "threadId", "targetId" ] + }, + "GotoResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'goto' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "PauseRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request suspends the debuggee.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'pause') after the thread has been paused successfully.", + "properties": { + "command": { + "type": "string", + "enum": [ "pause" ] + }, + "arguments": { + "$ref": "#/definitions/PauseArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "PauseArguments": { + "type": "object", + "description": "Arguments for 'pause' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Pause execution for this thread." + } + }, + "required": [ "threadId" ] + }, + "PauseResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'pause' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StackTraceRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request returns a stacktrace from the current execution state of a given thread.\nA client can request all stack frames by omitting the startFrame and levels arguments. For performance conscious clients and if the debug adapter's 'supportsDelayedStackTraceLoading' capability is true, stack frames can be retrieved in a piecemeal way with the startFrame and levels arguments. The response of the stackTrace request may contain a totalFrames property that hints at the total number of frames in the stack. If a client needs this total number upfront, it can issue a request for a single (first) frame and depending on the value of totalFrames decide how to proceed. 
In any case a client should be prepared to receive less frames than requested, which is an indication that the end of the stack has been reached.", + "properties": { + "command": { + "type": "string", + "enum": [ "stackTrace" ] + }, + "arguments": { + "$ref": "#/definitions/StackTraceArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StackTraceArguments": { + "type": "object", + "description": "Arguments for 'stackTrace' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Retrieve the stacktrace for this thread." + }, + "startFrame": { + "type": "integer", + "description": "The index of the first frame to return; if omitted frames start at 0." + }, + "levels": { + "type": "integer", + "description": "The maximum number of frames to return. If levels is not specified or 0, all frames are returned." + }, + "format": { + "$ref": "#/definitions/StackFrameFormat", + "description": "Specifies details on how to format the stack frames.\nThe attribute is only honored by a debug adapter if the capability 'supportsValueFormattingOptions' is true." + } + }, + "required": [ "threadId" ] + }, + "StackTraceResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stackTrace' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "stackFrames": { + "type": "array", + "items": { + "$ref": "#/definitions/StackFrame" + }, + "description": "The frames of the stackframe. If the array has length zero, there are no stackframes available.\nThis means that there is no location information available." + }, + "totalFrames": { + "type": "integer", + "description": "The total number of frames available in the stack. If omitted or if totalFrames is larger than the available frames, a client is expected to request frames until a request returns less frames than requested (which indicates the end of the stack). Returning monotonically increasing totalFrames values for subsequent requests can be used to enforce paging in the client." + } + }, + "required": [ "stackFrames" ] + } + }, + "required": [ "body" ] + }] + }, + + "ScopesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request returns the variable scopes for a given stackframe ID.", + "properties": { + "command": { + "type": "string", + "enum": [ "scopes" ] + }, + "arguments": { + "$ref": "#/definitions/ScopesArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ScopesArguments": { + "type": "object", + "description": "Arguments for 'scopes' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "Retrieve the scopes for this stackframe." + } + }, + "required": [ "frameId" ] + }, + "ScopesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'scopes' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "scopes": { + "type": "array", + "items": { + "$ref": "#/definitions/Scope" + }, + "description": "The scopes of the stackframe. If the array has length zero, there are no scopes available." 
+ } + }, + "required": [ "scopes" ] + } + }, + "required": [ "body" ] + }] + }, + + "VariablesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Retrieves all child variables for the given variable reference.\nAn optional filter can be used to limit the fetched children to either named or indexed children.", + "properties": { + "command": { + "type": "string", + "enum": [ "variables" ] + }, + "arguments": { + "$ref": "#/definitions/VariablesArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "VariablesArguments": { + "type": "object", + "description": "Arguments for 'variables' request.", + "properties": { + "variablesReference": { + "type": "integer", + "description": "The Variable reference." + }, + "filter": { + "type": "string", + "enum": [ "indexed", "named" ], + "description": "Optional filter to limit the child variables to either named or indexed. If omitted, both types are fetched." + }, + "start": { + "type": "integer", + "description": "The index of the first variable to return; if omitted children start at 0." + }, + "count": { + "type": "integer", + "description": "The number of variables to return. If count is missing or 0, all variables are returned." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies details on how to format the Variable values.\nThe attribute is only honored by a debug adapter if the capability 'supportsValueFormattingOptions' is true." + } + }, + "required": [ "variablesReference" ] + }, + "VariablesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'variables' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "variables": { + "type": "array", + "items": { + "$ref": "#/definitions/Variable" + }, + "description": "All (or a range) of variables for the given variable reference." + } + }, + "required": [ "variables" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetVariableRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Set the variable with the given name in the variable container to a new value. Clients should only call this request if the capability 'supportsSetVariable' is true.\nIf a debug adapter implements both setVariable and setExpression, a client will only use setExpression if the variable has an evaluateName property.", + "properties": { + "command": { + "type": "string", + "enum": [ "setVariable" ] + }, + "arguments": { + "$ref": "#/definitions/SetVariableArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetVariableArguments": { + "type": "object", + "description": "Arguments for 'setVariable' request.", + "properties": { + "variablesReference": { + "type": "integer", + "description": "The reference of the variable container." + }, + "name": { + "type": "string", + "description": "The name of the variable in the container." + }, + "value": { + "type": "string", + "description": "The value of the variable." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies details on how to format the response value." 
+ } + }, + "required": [ "variablesReference", "name", "value" ] + }, + "SetVariableResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setVariable' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "value": { + "type": "string", + "description": "The new value of the variable." + }, + "type": { + "type": "string", + "description": "The type of the new value. Typically shown in the UI when hovering over the value." + }, + "variablesReference": { + "type": "integer", + "description": "If variablesReference is > 0, the new value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest.\nThe value should be less than or equal to 2147483647 (2^31-1)." + }, + "namedVariables": { + "type": "integer", + "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks.\nThe value should be less than or equal to 2147483647 (2^31-1)." + }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks.\nThe value should be less than or equal to 2147483647 (2^31-1)." + } + }, + "required": [ "value" ] + } + }, + "required": [ "body" ] + }] + }, + + "SourceRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request retrieves the source code for a given source reference.", + "properties": { + "command": { + "type": "string", + "enum": [ "source" ] + }, + "arguments": { + "$ref": "#/definitions/SourceArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SourceArguments": { + "type": "object", + "description": "Arguments for 'source' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "Specifies the source content to load. Either source.path or source.sourceReference must be specified." + }, + "sourceReference": { + "type": "integer", + "description": "The reference to the source. This is the same as source.sourceReference.\nThis is provided for backward compatibility since old backends do not understand the 'source' attribute." + } + }, + "required": [ "sourceReference" ] + }, + "SourceResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'source' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "content": { + "type": "string", + "description": "Content of the source reference." + }, + "mimeType": { + "type": "string", + "description": "Optional content type (mime type) of the source." + } + }, + "required": [ "content" ] + } + }, + "required": [ "body" ] + }] + }, + + "ThreadsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request retrieves a list of all threads.", + "properties": { + "command": { + "type": "string", + "enum": [ "threads" ] + } + }, + "required": [ "command" ] + }] + }, + "ThreadsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'threads' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "threads": { + "type": "array", + "items": { + "$ref": "#/definitions/Thread" + }, + "description": "All threads." 
+ } + }, + "required": [ "threads" ] + } + }, + "required": [ "body" ] + }] + }, + + "TerminateThreadsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "The request terminates the threads with the given ids.\nClients should only call this request if the capability 'supportsTerminateThreadsRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "terminateThreads" ] + }, + "arguments": { + "$ref": "#/definitions/TerminateThreadsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "TerminateThreadsArguments": { + "type": "object", + "description": "Arguments for 'terminateThreads' request.", + "properties": { + "threadIds": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Ids of threads to be terminated." + } + } + }, + "TerminateThreadsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'terminateThreads' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "ModulesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Modules can be retrieved from the debug adapter with this request which can either return all modules or a range of modules to support paging.\nClients should only call this request if the capability 'supportsModulesRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "modules" ] + }, + "arguments": { + "$ref": "#/definitions/ModulesArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ModulesArguments": { + "type": "object", + "description": "Arguments for 'modules' request.", + "properties": { + "startModule": { + "type": "integer", + "description": "The index of the first module to return; if omitted modules start at 0." + }, + "moduleCount": { + "type": "integer", + "description": "The number of modules to return. If moduleCount is not specified or 0, all modules are returned." + } + } + }, + "ModulesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'modules' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "modules": { + "type": "array", + "items": { + "$ref": "#/definitions/Module" + }, + "description": "All modules or range of modules." + }, + "totalModules": { + "type": "integer", + "description": "The total number of modules available." + } + }, + "required": [ "modules" ] + } + }, + "required": [ "body" ] + }] + }, + + "LoadedSourcesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Retrieves the set of all sources currently loaded by the debugged process.\nClients should only call this request if the capability 'supportsLoadedSourcesRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "loadedSources" ] + }, + "arguments": { + "$ref": "#/definitions/LoadedSourcesArguments" + } + }, + "required": [ "command" ] + }] + }, + "LoadedSourcesArguments": { + "type": "object", + "description": "Arguments for 'loadedSources' request." 
+ }, + "LoadedSourcesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'loadedSources' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "sources": { + "type": "array", + "items": { + "$ref": "#/definitions/Source" + }, + "description": "Set of loaded sources." + } + }, + "required": [ "sources" ] + } + }, + "required": [ "body" ] + }] + }, + + "EvaluateRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Evaluates the given expression in the context of the top most stack frame.\nThe expression has access to any variables and arguments that are in scope.", + "properties": { + "command": { + "type": "string", + "enum": [ "evaluate" ] + }, + "arguments": { + "$ref": "#/definitions/EvaluateArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "EvaluateArguments": { + "type": "object", + "description": "Arguments for 'evaluate' request.", + "properties": { + "expression": { + "type": "string", + "description": "The expression to evaluate." + }, + "frameId": { + "type": "integer", + "description": "Evaluate the expression in the scope of this stack frame. If not specified, the expression is evaluated in the global scope." + }, + "context": { + "type": "string", + "_enum": [ "watch", "repl", "hover", "clipboard" ], + "enumDescriptions": [ + "evaluate is run in a watch.", + "evaluate is run from REPL console.", + "evaluate is run from a data hover.", + "evaluate is run to generate the value that will be stored in the clipboard.\nThe attribute is only honored by a debug adapter if the capability 'supportsClipboardContext' is true." + ], + "description": "The context in which the evaluate request is run." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies details on how to format the Evaluate result.\nThe attribute is only honored by a debug adapter if the capability 'supportsValueFormattingOptions' is true." + } + }, + "required": [ "expression" ] + }, + "EvaluateResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'evaluate' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "result": { + "type": "string", + "description": "The result of the evaluate request." + }, + "type": { + "type": "string", + "description": "The optional type of the evaluate result.\nThis attribute should only be returned by a debug adapter if the client has passed the value true for the 'supportsVariableType' capability of the 'initialize' request." + }, + "presentationHint": { + "$ref": "#/definitions/VariablePresentationHint", + "description": "Properties of a evaluate result that can be used to determine how to render the result in the UI." + }, + "variablesReference": { + "type": "integer", + "description": "If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest.\nThe value should be less than or equal to 2147483647 (2^31-1)." + }, + "namedVariables": { + "type": "integer", + "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks.\nThe value should be less than or equal to 2147483647 (2^31-1)." 
+ }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks.\nThe value should be less than or equal to 2147483647 (2^31-1)." + }, + "memoryReference": { + "type": "string", + "description": "Optional memory reference to a location appropriate for this result.\nFor pointer type eval results, this is generally a reference to the memory address contained in the pointer.\nThis attribute should be returned by a debug adapter if the client has passed the value true for the 'supportsMemoryReferences' capability of the 'initialize' request." + } + }, + "required": [ "result", "variablesReference" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetExpressionRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Evaluates the given 'value' expression and assigns it to the 'expression' which must be a modifiable l-value.\nThe expressions have access to any variables and arguments that are in scope of the specified frame.\nClients should only call this request if the capability 'supportsSetExpression' is true.\nIf a debug adapter implements both setExpression and setVariable, a client will only use setExpression if the variable has an evaluateName property.", + "properties": { + "command": { + "type": "string", + "enum": [ "setExpression" ] + }, + "arguments": { + "$ref": "#/definitions/SetExpressionArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetExpressionArguments": { + "type": "object", + "description": "Arguments for 'setExpression' request.", + "properties": { + "expression": { + "type": "string", + "description": "The l-value expression to assign to." + }, + "value": { + "type": "string", + "description": "The value expression to assign to the l-value expression." + }, + "frameId": { + "type": "integer", + "description": "Evaluate the expressions in the scope of this stack frame. If not specified, the expressions are evaluated in the global scope." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies how the resulting value should be formatted." + } + }, + "required": [ "expression", "value" ] + }, + "SetExpressionResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setExpression' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "value": { + "type": "string", + "description": "The new value of the expression." + }, + "type": { + "type": "string", + "description": "The optional type of the value.\nThis attribute should only be returned by a debug adapter if the client has passed the value true for the 'supportsVariableType' capability of the 'initialize' request." + }, + "presentationHint": { + "$ref": "#/definitions/VariablePresentationHint", + "description": "Properties of a value that can be used to determine how to render the result in the UI." + }, + "variablesReference": { + "type": "integer", + "description": "If variablesReference is > 0, the value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest.\nThe value should be less than or equal to 2147483647 (2^31-1)." 
+ }, + "namedVariables": { + "type": "integer", + "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks.\nThe value should be less than or equal to 2147483647 (2^31-1)." + }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks.\nThe value should be less than or equal to 2147483647 (2^31-1)." + } + }, + "required": [ "value" ] + } + }, + "required": [ "body" ] + }] + }, + + "StepInTargetsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "This request retrieves the possible stepIn targets for the specified stack frame.\nThese targets can be used in the 'stepIn' request.\nThe StepInTargets may only be called if the 'supportsStepInTargetsRequest' capability exists and is true.\nClients should only call this request if the capability 'supportsStepInTargetsRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepInTargets" ] + }, + "arguments": { + "$ref": "#/definitions/StepInTargetsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepInTargetsArguments": { + "type": "object", + "description": "Arguments for 'stepInTargets' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "The stack frame for which to retrieve the possible stepIn targets." + } + }, + "required": [ "frameId" ] + }, + "StepInTargetsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepInTargets' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "targets": { + "type": "array", + "items": { + "$ref": "#/definitions/StepInTarget" + }, + "description": "The possible stepIn targets of the specified source location." + } + }, + "required": [ "targets" ] + } + }, + "required": [ "body" ] + }] + }, + + "GotoTargetsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "This request retrieves the possible goto targets for the specified source location.\nThese targets can be used in the 'goto' request.\nClients should only call this request if the capability 'supportsGotoTargetsRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "gotoTargets" ] + }, + "arguments": { + "$ref": "#/definitions/GotoTargetsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "GotoTargetsArguments": { + "type": "object", + "description": "Arguments for 'gotoTargets' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "The source location for which the goto targets are determined." + }, + "line": { + "type": "integer", + "description": "The line location for which the goto targets are determined." + }, + "column": { + "type": "integer", + "description": "An optional column location for which the goto targets are determined." 
+ } + }, + "required": [ "source", "line" ] + }, + "GotoTargetsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'gotoTargets' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "targets": { + "type": "array", + "items": { + "$ref": "#/definitions/GotoTarget" + }, + "description": "The possible goto targets of the specified location." + } + }, + "required": [ "targets" ] + } + }, + "required": [ "body" ] + }] + }, + + "CompletionsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Returns a list of possible completions for a given caret position and text.\nClients should only call this request if the capability 'supportsCompletionsRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "completions" ] + }, + "arguments": { + "$ref": "#/definitions/CompletionsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "CompletionsArguments": { + "type": "object", + "description": "Arguments for 'completions' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "Returns completions in the scope of this stack frame. If not specified, the completions are returned for the global scope." + }, + "text": { + "type": "string", + "description": "One or more source lines. Typically this is the text a user has typed into the debug console before he asked for completion." + }, + "column": { + "type": "integer", + "description": "The character position for which to determine the completion proposals." + }, + "line": { + "type": "integer", + "description": "An optional line for which to determine the completion proposals. If missing the first line of the text is assumed." + } + }, + "required": [ "text", "column" ] + }, + "CompletionsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'completions' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "targets": { + "type": "array", + "items": { + "$ref": "#/definitions/CompletionItem" + }, + "description": "The possible completions for ." + } + }, + "required": [ "targets" ] + } + }, + "required": [ "body" ] + }] + }, + + "ExceptionInfoRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Retrieves the details of the exception that caused this event to be raised.\nClients should only call this request if the capability 'supportsExceptionInfoRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "exceptionInfo" ] + }, + "arguments": { + "$ref": "#/definitions/ExceptionInfoArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ExceptionInfoArguments": { + "type": "object", + "description": "Arguments for 'exceptionInfo' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Thread for which exception information should be retrieved." + } + }, + "required": [ "threadId" ] + }, + "ExceptionInfoResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'exceptionInfo' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "exceptionId": { + "type": "string", + "description": "ID of the exception that was thrown." + }, + "description": { + "type": "string", + "description": "Descriptive text for the exception provided by the debug adapter." 
+ }, + "breakMode": { + "$ref": "#/definitions/ExceptionBreakMode", + "description": "Mode that caused the exception notification to be raised." + }, + "details": { + "$ref": "#/definitions/ExceptionDetails", + "description": "Detailed information about the exception." + } + }, + "required": [ "exceptionId", "breakMode" ] + } + }, + "required": [ "body" ] + }] + }, + + "ReadMemoryRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Reads bytes from memory at the provided location.\nClients should only call this request if the capability 'supportsReadMemoryRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "readMemory" ] + }, + "arguments": { + "$ref": "#/definitions/ReadMemoryArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ReadMemoryArguments": { + "type": "object", + "description": "Arguments for 'readMemory' request.", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference to the base location from which data should be read." + }, + "offset": { + "type": "integer", + "description": "Optional offset (in bytes) to be applied to the reference location before reading data. Can be negative." + }, + "count": { + "type": "integer", + "description": "Number of bytes to read at the specified location and offset." + } + }, + "required": [ "memoryReference", "count" ] + }, + "ReadMemoryResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'readMemory' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "address": { + "type": "string", + "description": "The address of the first byte of data returned.\nTreated as a hex value if prefixed with '0x', or as a decimal value otherwise." + }, + "unreadableBytes": { + "type": "integer", + "description": "The number of unreadable bytes encountered after the last successfully read byte.\nThis can be used to determine the number of bytes that must be skipped before a subsequent 'readMemory' request will succeed." + }, + "data": { + "type": "string", + "description": "The bytes read from memory, encoded using base64." + } + }, + "required": [ "address" ] + } + } + }] + }, + + "WriteMemoryRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Writes bytes to memory at the provided location.\nClients should only call this request if the capability 'supportsWriteMemoryRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "writeMemory" ] + }, + "arguments": { + "$ref": "#/definitions/WriteMemoryArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "WriteMemoryArguments": { + "type": "object", + "description": "Arguments for 'writeMemory' request.", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference to the base location to which data should be written." + }, + "offset": { + "type": "integer", + "description": "Optional offset (in bytes) to be applied to the reference location before writing data. Can be negative." + }, + "allowPartial": { + "type": "boolean", + "description": "Optional property to control partial writes. If true, the debug adapter should attempt to write memory even if the entire memory region is not writable. 
In such a case the debug adapter should stop after hitting the first byte of memory that cannot be written and return the number of bytes written in the response via the 'offset' and 'bytesWritten' properties.\nIf false or missing, a debug adapter should attempt to verify the region is writable before writing, and fail the response if it is not." + }, + "data": { + "type": "string", + "description": "Bytes to write, encoded using base64." + } + }, + "required": [ "memoryReference", "data" ] + }, + "WriteMemoryResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'writeMemory' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "offset": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the offset of the first byte of data successfully written. Can be negative." + }, + "bytesWritten": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the number of bytes starting from address that were successfully written." + } + } + } + } + }] + }, + + "DisassembleRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Disassembles code stored at the provided location.\nClients should only call this request if the capability 'supportsDisassembleRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "disassemble" ] + }, + "arguments": { + "$ref": "#/definitions/DisassembleArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "DisassembleArguments": { + "type": "object", + "description": "Arguments for 'disassemble' request.", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference to the base location containing the instructions to disassemble." + }, + "offset": { + "type": "integer", + "description": "Optional offset (in bytes) to be applied to the reference location before disassembling. Can be negative." + }, + "instructionOffset": { + "type": "integer", + "description": "Optional offset (in instructions) to be applied after the byte offset (if any) before disassembling. Can be negative." + }, + "instructionCount": { + "type": "integer", + "description": "Number of instructions to disassemble starting at the specified location and offset.\nAn adapter must return exactly this number of instructions - any unavailable instructions should be replaced with an implementation-defined 'invalid instruction' value." + }, + "resolveSymbols": { + "type": "boolean", + "description": "If true, the adapter should attempt to resolve memory addresses and other values to symbolic names." + } + }, + "required": [ "memoryReference", "instructionCount" ] + }, + "DisassembleResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'disassemble' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "instructions": { + "type": "array", + "items": { + "$ref": "#/definitions/DisassembledInstruction" + }, + "description": "The list of disassembled instructions." 
+ } + }, + "required": [ "instructions" ] + } + } + }] + }, + + "Capabilities": { + "type": "object", + "title": "Types", + "description": "Information about the capabilities of a debug adapter.", + "properties": { + "supportsConfigurationDoneRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'configurationDone' request." + }, + "supportsFunctionBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports function breakpoints." + }, + "supportsConditionalBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports conditional breakpoints." + }, + "supportsHitConditionalBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports breakpoints that break execution after a specified number of hits." + }, + "supportsEvaluateForHovers": { + "type": "boolean", + "description": "The debug adapter supports a (side effect free) evaluate request for data hovers." + }, + "exceptionBreakpointFilters": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionBreakpointsFilter" + }, + "description": "Available exception filter options for the 'setExceptionBreakpoints' request." + }, + "supportsStepBack": { + "type": "boolean", + "description": "The debug adapter supports stepping back via the 'stepBack' and 'reverseContinue' requests." + }, + "supportsSetVariable": { + "type": "boolean", + "description": "The debug adapter supports setting a variable to a value." + }, + "supportsRestartFrame": { + "type": "boolean", + "description": "The debug adapter supports restarting a frame." + }, + "supportsGotoTargetsRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'gotoTargets' request." + }, + "supportsStepInTargetsRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'stepInTargets' request." + }, + "supportsCompletionsRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'completions' request." + }, + "completionTriggerCharacters": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The set of characters that should trigger completion in a REPL. If not specified, the UI should assume the '.' character." + }, + "supportsModulesRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'modules' request." + }, + "additionalModuleColumns": { + "type": "array", + "items": { + "$ref": "#/definitions/ColumnDescriptor" + }, + "description": "The set of additional module information exposed by the debug adapter." + }, + "supportedChecksumAlgorithms": { + "type": "array", + "items": { + "$ref": "#/definitions/ChecksumAlgorithm" + }, + "description": "Checksum algorithms supported by the debug adapter." + }, + "supportsRestartRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'restart' request. In this case a client should not implement 'restart' by terminating and relaunching the adapter but by calling the RestartRequest." + }, + "supportsExceptionOptions": { + "type": "boolean", + "description": "The debug adapter supports 'exceptionOptions' on the setExceptionBreakpoints request." + }, + "supportsValueFormattingOptions": { + "type": "boolean", + "description": "The debug adapter supports a 'format' attribute on the stackTraceRequest, variablesRequest, and evaluateRequest." + }, + "supportsExceptionInfoRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'exceptionInfo' request." 
+ }, + "supportTerminateDebuggee": { + "type": "boolean", + "description": "The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request." + }, + "supportSuspendDebuggee": { + "type": "boolean", + "description": "The debug adapter supports the 'suspendDebuggee' attribute on the 'disconnect' request." + }, + "supportsDelayedStackTraceLoading": { + "type": "boolean", + "description": "The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and an optional 'totalFrames' result of the 'StackTrace' request are supported." + }, + "supportsLoadedSourcesRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'loadedSources' request." + }, + "supportsLogPoints": { + "type": "boolean", + "description": "The debug adapter supports logpoints by interpreting the 'logMessage' attribute of the SourceBreakpoint." + }, + "supportsTerminateThreadsRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'terminateThreads' request." + }, + "supportsSetExpression": { + "type": "boolean", + "description": "The debug adapter supports the 'setExpression' request." + }, + "supportsTerminateRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'terminate' request." + }, + "supportsDataBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports data breakpoints." + }, + "supportsReadMemoryRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'readMemory' request." + }, + "supportsWriteMemoryRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'writeMemory' request." + }, + "supportsDisassembleRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'disassemble' request." + }, + "supportsCancelRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'cancel' request." + }, + "supportsBreakpointLocationsRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'breakpointLocations' request." + }, + "supportsClipboardContext": { + "type": "boolean", + "description": "The debug adapter supports the 'clipboard' context value in the 'evaluate' request." + }, + "supportsSteppingGranularity": { + "type": "boolean", + "description": "The debug adapter supports stepping granularities (argument 'granularity') for the stepping requests." + }, + "supportsInstructionBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports adding breakpoints based on instruction references." + }, + "supportsExceptionFilterOptions": { + "type": "boolean", + "description": "The debug adapter supports 'filterOptions' as an argument on the 'setExceptionBreakpoints' request." + }, + "supportsSingleThreadExecutionRequests": { + "type": "boolean", + "description": "The debug adapter supports the 'singleThread' property on the execution requests ('continue', 'next', 'stepIn', 'stepOut', 'reverseContinue', 'stepBack')." + } + } + }, + + "ExceptionBreakpointsFilter": { + "type": "object", + "description": "An ExceptionBreakpointsFilter is shown in the UI as a filter option for configuring how exceptions are dealt with.", + "properties": { + "filter": { + "type": "string", + "description": "The internal ID of the filter option. This value is passed to the 'setExceptionBreakpoints' request." + }, + "label": { + "type": "string", + "description": "The name of the filter option.
This will be shown in the UI." + }, + "description": { + "type": "string", + "description": "An optional help text providing additional information about the exception filter. This string is typically shown as a hover and must be translated." + }, + "default": { + "type": "boolean", + "description": "Initial value of the filter option. If not specified a value 'false' is assumed." + }, + "supportsCondition": { + "type": "boolean", + "description": "Controls whether a condition can be specified for this filter option. If false or missing, a condition can not be set." + }, + "conditionDescription": { + "type": "string", + "description": "An optional help text providing information about the condition. This string is shown as the placeholder text for a text box and must be translated." + } + }, + "required": [ "filter", "label" ] + }, + + "Message": { + "type": "object", + "description": "A structured message object. Used to return errors from requests.", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for the message." + }, + "format": { + "type": "string", + "description": "A format string for the message. Embedded variables have the form '{name}'.\nIf variable name starts with an underscore character, the variable does not contain user data (PII) and can be safely used for telemetry purposes." + }, + "variables": { + "type": "object", + "description": "An object used as a dictionary for looking up the variables in the format string.", + "additionalProperties": { + "type": "string", + "description": "Values must be strings." + } + }, + "sendTelemetry": { + "type": "boolean", + "description": "If true send to telemetry." + }, + "showUser": { + "type": "boolean", + "description": "If true show user." + }, + "url": { + "type": "string", + "description": "An optional url where additional information about this message can be found." + }, + "urlLabel": { + "type": "string", + "description": "An optional label that is presented to the user as the UI for opening the url." + } + }, + "required": [ "id", "format" ] + }, + + "Module": { + "type": "object", + "description": "A Module object represents a row in the modules view.\nTwo attributes are mandatory: an id identifies a module in the modules view and is used in a ModuleEvent for identifying a module for adding, updating or deleting.\nThe name is used to minimally render the module in the UI.\n\nAdditional attributes can be added to the module. They will show up in the module View if they have a corresponding ColumnDescriptor.\n\nTo avoid an unnecessary proliferation of additional attributes with similar semantics but different names\nwe recommend to re-use attributes from the 'recommended' list below first, and only introduce new attributes if nothing appropriate could be found.", + "properties": { + "id": { + "type": ["integer", "string"], + "description": "Unique identifier for the module." + }, + "name": { + "type": "string", + "description": "A name of the module." + }, + "path": { + "type": "string", + "description": "optional but recommended attributes.\nalways try to use these first before introducing additional attributes.\n\nLogical full path to the module. The exact definition is implementation defined, but usually this would be a full path to the on-disk file for the module." + }, + "isOptimized": { + "type": "boolean", + "description": "True if the module is optimized." 
+ }, + "isUserCode": { + "type": "boolean", + "description": "True if the module is considered 'user code' by a debugger that supports 'Just My Code'." + }, + "version": { + "type": "string", + "description": "Version of Module." + }, + "symbolStatus": { + "type": "string", + "description": "User-understandable description of whether symbols were found for the module (ex: 'Symbols Loaded', 'Symbols not found', etc.)." + }, + "symbolFilePath": { + "type": "string", + "description": "Logical full path to the symbol file. The exact definition is implementation defined." + }, + "dateTimeStamp": { + "type": "string", + "description": "Module created or modified." + }, + "addressRange": { + "type": "string", + "description": "Address range covered by this module." + } + }, + "required": [ "id", "name" ] + }, + + "ColumnDescriptor": { + "type": "object", + "description": "A ColumnDescriptor specifies what module attribute to show in a column of the ModulesView, how to format it,\nand what the column's label should be.\nIt is only used if the underlying UI actually supports this level of customization.", + "properties": { + "attributeName": { + "type": "string", + "description": "Name of the attribute rendered in this column." + }, + "label": { + "type": "string", + "description": "Header UI label of column." + }, + "format": { + "type": "string", + "description": "Format to use for the rendered values in this column. TBD how the format strings look." + }, + "type": { + "type": "string", + "enum": [ "string", "number", "boolean", "unixTimestampUTC" ], + "description": "Datatype of values in this column. Defaults to 'string' if not specified." + }, + "width": { + "type": "integer", + "description": "Width of this column in characters (hint only)." + } + }, + "required": [ "attributeName", "label"] + }, + + "ModulesViewDescriptor": { + "type": "object", + "description": "The ModulesViewDescriptor is the container for all declarative configuration options of a ModuleView.\nFor now it only specifies the columns to be shown in the modules view.", + "properties": { + "columns": { + "type": "array", + "items": { + "$ref": "#/definitions/ColumnDescriptor" + } + } + }, + "required": [ "columns" ] + }, + + "Thread": { + "type": "object", + "description": "A Thread", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for the thread." + }, + "name": { + "type": "string", + "description": "A name of the thread." + } + }, + "required": [ "id", "name" ] + }, + + "Source": { + "type": "object", + "description": "A Source is a descriptor for source code.\nIt is returned from the debug adapter as part of a StackFrame and it is used by clients when specifying breakpoints.", + "properties": { + "name": { + "type": "string", + "description": "The short name of the source. Every source returned from the debug adapter has a name.\nWhen sending a source to the debug adapter this name is optional." + }, + "path": { + "type": "string", + "description": "The path of the source to be shown in the UI.\nIt is only used to locate and load the content of the source if no sourceReference is specified (or its value is 0)." + }, + "sourceReference": { + "type": "integer", + "description": "If sourceReference > 0 the contents of the source must be retrieved through the SourceRequest (even if a path is specified).\nA sourceReference is only valid for a session, so it must not be used to persist a source.\nThe value should be less than or equal to 2147483647 (2^31-1)."
+ }, + "presentationHint": { + "type": "string", + "description": "An optional hint for how to present the source in the UI.\nA value of 'deemphasize' can be used to indicate that the source is not available or that it is skipped on stepping.", + "enum": [ "normal", "emphasize", "deemphasize" ] + }, + "origin": { + "type": "string", + "description": "The (optional) origin of this source: possible values 'internal module', 'inlined content from source map', etc." + }, + "sources": { + "type": "array", + "items": { + "$ref": "#/definitions/Source" + }, + "description": "An optional list of sources that are related to this source. These may be the source that generated this source." + }, + "adapterData": { + "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], + "description": "Optional data that a debug adapter might want to loop through the client.\nThe client should leave the data intact and persist it across sessions. The client should not interpret the data." + }, + "checksums": { + "type": "array", + "items": { + "$ref": "#/definitions/Checksum" + }, + "description": "The checksums associated with this file." + } + } + }, + + "StackFrame": { + "type": "object", + "description": "A Stackframe contains the source location.", + "properties": { + "id": { + "type": "integer", + "description": "An identifier for the stack frame. It must be unique across all threads.\nThis id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe." + }, + "name": { + "type": "string", + "description": "The name of the stack frame, typically a method name." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "The optional source of the frame." + }, + "line": { + "type": "integer", + "description": "The line within the file of the frame. If source is null or doesn't exist, line is 0 and must be ignored." + }, + "column": { + "type": "integer", + "description": "The column within the line. If source is null or doesn't exist, column is 0 and must be ignored." + }, + "endLine": { + "type": "integer", + "description": "An optional end line of the range covered by the stack frame." + }, + "endColumn": { + "type": "integer", + "description": "An optional end column of the range covered by the stack frame." + }, + "canRestart": { + "type": "boolean", + "description": "Indicates whether this frame can be restarted with the 'restart' request. Clients should only use this if the debug adapter supports the 'restart' request (capability 'supportsRestartRequest' is true)." + }, + "instructionPointerReference": { + "type": "string", + "description": "Optional memory reference for the current instruction pointer in this frame." + }, + "moduleId": { + "type": ["integer", "string"], + "description": "The module associated with this frame, if any." + }, + "presentationHint": { + "type": "string", + "enum": [ "normal", "label", "subtle" ], + "description": "An optional hint for how to present this frame in the UI.\nA value of 'label' can be used to indicate that the frame is an artificial frame that is used as a visual label or separator. A value of 'subtle' can be used to change the appearance of a frame in a 'subtle' way." + } + }, + "required": [ "id", "name", "line", "column" ] + }, + + "Scope": { + "type": "object", + "description": "A Scope is a named container for variables. 
Optionally a scope can map to a source or a range within a source.", + "properties": { + "name": { + "type": "string", + "description": "Name of the scope such as 'Arguments', 'Locals', or 'Registers'. This string is shown in the UI as is and can be translated." + }, + "presentationHint": { + "type": "string", + "description": "An optional hint for how to present this scope in the UI. If this attribute is missing, the scope is shown with a generic UI.", + "_enum": [ "arguments", "locals", "registers" ], + "enumDescriptions": [ + "Scope contains method arguments.", + "Scope contains local variables.", + "Scope contains registers. Only a single 'registers' scope should be returned from a 'scopes' request." + ] + }, + "variablesReference": { + "type": "integer", + "description": "The variables of this scope can be retrieved by passing the value of variablesReference to the VariablesRequest." + }, + "namedVariables": { + "type": "integer", + "description": "The number of named variables in this scope.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed variables in this scope.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + }, + "expensive": { + "type": "boolean", + "description": "If true, the number of variables in this scope is large or expensive to retrieve." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "Optional source for this scope." + }, + "line": { + "type": "integer", + "description": "Optional start line of the range covered by this scope." + }, + "column": { + "type": "integer", + "description": "Optional start column of the range covered by this scope." + }, + "endLine": { + "type": "integer", + "description": "Optional end line of the range covered by this scope." + }, + "endColumn": { + "type": "integer", + "description": "Optional end column of the range covered by this scope." + } + }, + "required": [ "name", "variablesReference", "expensive" ] + }, + + "Variable": { + "type": "object", + "description": "A Variable is a name/value pair.\nOptionally a variable can have a 'type' that is shown if space permits or when hovering over the variable's name.\nAn optional 'kind' is used to render additional properties of the variable, e.g. different icons can be used to indicate that a variable is public or private.\nIf the value is structured (has children), a handle is provided to retrieve the children with the VariablesRequest.\nIf the number of named or indexed children is large, the numbers should be returned via the optional 'namedVariables' and 'indexedVariables' attributes.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks.", + "properties": { + "name": { + "type": "string", + "description": "The variable's name." + }, + "value": { + "type": "string", + "description": "The variable's value. This can be a multi-line text, e.g. for a function the body of a function." + }, + "type": { + "type": "string", + "description": "The type of the variable's value. Typically shown in the UI when hovering over the value.\nThis attribute should only be returned by a debug adapter if the client has passed the value true for the 'supportsVariableType' capability of the 'initialize' request." 
+ }, + "presentationHint": { + "$ref": "#/definitions/VariablePresentationHint", + "description": "Properties of a variable that can be used to determine how to render the variable in the UI." + }, + "evaluateName": { + "type": "string", + "description": "Optional evaluatable name of this variable which can be passed to the 'EvaluateRequest' to fetch the variable's value." + }, + "variablesReference": { + "type": "integer", + "description": "If variablesReference is > 0, the variable is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." + }, + "namedVariables": { + "type": "integer", + "description": "The number of named child variables.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks." + }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks." + }, + "memoryReference": { + "type": "string", + "description": "Optional memory reference for the variable if the variable represents executable code, such as a function pointer.\nThis attribute is only required if the client has passed the value true for the 'supportsMemoryReferences' capability of the 'initialize' request." + } + }, + "required": [ "name", "value", "variablesReference" ] + }, + + "VariablePresentationHint": { + "type": "object", + "description": "Optional properties of a variable that can be used to determine how to render the variable in the UI.", + "properties": { + "kind": { + "description": "The kind of variable. Before introducing additional values, try to use the listed values.", + "type": "string", + "_enum": [ "property", "method", "class", "data", "event", "baseClass", "innerClass", "interface", "mostDerivedClass", "virtual", "dataBreakpoint" ], + "enumDescriptions": [ + "Indicates that the object is a property.", + "Indicates that the object is a method.", + "Indicates that the object is a class.", + "Indicates that the object is data.", + "Indicates that the object is an event.", + "Indicates that the object is a base class.", + "Indicates that the object is an inner class.", + "Indicates that the object is an interface.", + "Indicates that the object is the most derived class.", + "Indicates that the object is virtual, that means it is a synthetic object introduced by the\nadapter for rendering purposes, e.g. an index range for large arrays.", + "Deprecated: Indicates that a data breakpoint is registered for the object. The 'hasDataBreakpoint' attribute should generally be used instead." + ] + }, + "attributes": { + "description": "Set of attributes represented as an array of strings. Before introducing additional values, try to use the listed values.", + "type": "array", + "items": { + "type": "string", + "_enum": [ "static", "constant", "readOnly", "rawString", "hasObjectId", "canHaveObjectId", "hasSideEffects", "hasDataBreakpoint" ], + "enumDescriptions": [ + "Indicates that the object is static.", + "Indicates that the object is a constant.", + "Indicates that the object is read only.", + "Indicates that the object is a raw string.", + "Indicates that the object can have an Object ID created for it.", + "Indicates that the object has an Object ID associated with it.", + "Indicates that the evaluation had side effects.", + "Indicates that the object has its value tracked by a data breakpoint."
+ ] + } + }, + "visibility": { + "description": "Visibility of variable. Before introducing additional values, try to use the listed values.", + "type": "string", + "_enum": [ "public", "private", "protected", "internal", "final" ] + } + } + }, + + "BreakpointLocation": { + "type": "object", + "description": "Properties of a breakpoint location returned from the 'breakpointLocations' request.", + "properties": { + "line": { + "type": "integer", + "description": "Start line of breakpoint location." + }, + "column": { + "type": "integer", + "description": "Optional start column of breakpoint location." + }, + "endLine": { + "type": "integer", + "description": "Optional end line of breakpoint location if the location covers a range." + }, + "endColumn": { + "type": "integer", + "description": "Optional end column of breakpoint location if the location covers a range." + } + }, + "required": [ "line" ] + }, + + "SourceBreakpoint": { + "type": "object", + "description": "Properties of a breakpoint or logpoint passed to the setBreakpoints request.", + "properties": { + "line": { + "type": "integer", + "description": "The source line of the breakpoint or logpoint." + }, + "column": { + "type": "integer", + "description": "An optional source column of the breakpoint." + }, + "condition": { + "type": "string", + "description": "An optional expression for conditional breakpoints.\nIt is only honored by a debug adapter if the capability 'supportsConditionalBreakpoints' is true." + }, + "hitCondition": { + "type": "string", + "description": "An optional expression that controls how many hits of the breakpoint are ignored.\nThe backend is expected to interpret the expression as needed.\nThe attribute is only honored by a debug adapter if the capability 'supportsHitConditionalBreakpoints' is true." + }, + "logMessage": { + "type": "string", + "description": "If this attribute exists and is non-empty, the backend must not 'break' (stop)\nbut log the message instead. Expressions within {} are interpolated.\nThe attribute is only honored by a debug adapter if the capability 'supportsLogPoints' is true." + } + }, + "required": [ "line" ] + }, + + "FunctionBreakpoint": { + "type": "object", + "description": "Properties of a breakpoint passed to the setFunctionBreakpoints request.", + "properties": { + "name": { + "type": "string", + "description": "The name of the function." + }, + "condition": { + "type": "string", + "description": "An optional expression for conditional breakpoints.\nIt is only honored by a debug adapter if the capability 'supportsConditionalBreakpoints' is true." + }, + "hitCondition": { + "type": "string", + "description": "An optional expression that controls how many hits of the breakpoint are ignored.\nThe backend is expected to interpret the expression as needed.\nThe attribute is only honored by a debug adapter if the capability 'supportsHitConditionalBreakpoints' is true." + } + }, + "required": [ "name" ] + }, + + "DataBreakpointAccessType": { + "type": "string", + "description": "This enumeration defines all possible access types for data breakpoints.", + "enum": [ "read", "write", "readWrite" ] + }, + + "DataBreakpoint": { + "type": "object", + "description": "Properties of a data breakpoint passed to the setDataBreakpoints request.", + "properties": { + "dataId": { + "type": "string", + "description": "An id representing the data. This id is returned from the dataBreakpointInfo request." 
+ }, + "accessType": { + "$ref": "#/definitions/DataBreakpointAccessType", + "description": "The access type of the data." + }, + "condition": { + "type": "string", + "description": "An optional expression for conditional breakpoints." + }, + "hitCondition": { + "type": "string", + "description": "An optional expression that controls how many hits of the breakpoint are ignored.\nThe backend is expected to interpret the expression as needed." + } + }, + "required": [ "dataId" ] + }, + + "InstructionBreakpoint": { + "type": "object", + "description": "Properties of a breakpoint passed to the setInstructionBreakpoints request", + "properties": { + "instructionReference": { + "type": "string", + "description": "The instruction reference of the breakpoint.\nThis should be a memory or instruction pointer reference from an EvaluateResponse, Variable, StackFrame, GotoTarget, or Breakpoint." + }, + "offset": { + "type": "integer", + "description": "An optional offset from the instruction reference.\nThis can be negative." + }, + "condition": { + "type": "string", + "description": "An optional expression for conditional breakpoints.\nIt is only honored by a debug adapter if the capability 'supportsConditionalBreakpoints' is true." + }, + "hitCondition": { + "type": "string", + "description": "An optional expression that controls how many hits of the breakpoint are ignored.\nThe backend is expected to interpret the expression as needed.\nThe attribute is only honored by a debug adapter if the capability 'supportsHitConditionalBreakpoints' is true." + } + }, + "required": [ "instructionReference" ] + }, + + "Breakpoint": { + "type": "object", + "description": "Information about a Breakpoint created in setBreakpoints, setFunctionBreakpoints, setInstructionBreakpoints, or setDataBreakpoints.", + "properties": { + "id": { + "type": "integer", + "description": "An optional identifier for the breakpoint. It is needed if breakpoint events are used to update or remove breakpoints." + }, + "verified": { + "type": "boolean", + "description": "If true breakpoint could be set (but not necessarily at the desired location)." + }, + "message": { + "type": "string", + "description": "An optional message about the state of the breakpoint.\nThis is shown to the user and can be used to explain why a breakpoint could not be verified." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "The source where the breakpoint is located." + }, + "line": { + "type": "integer", + "description": "The start line of the actual range covered by the breakpoint." + }, + "column": { + "type": "integer", + "description": "An optional start column of the actual range covered by the breakpoint." + }, + "endLine": { + "type": "integer", + "description": "An optional end line of the actual range covered by the breakpoint." + }, + "endColumn": { + "type": "integer", + "description": "An optional end column of the actual range covered by the breakpoint.\nIf no end line is given, then the end column is assumed to be in the start line." + }, + "instructionReference": { + "type": "string", + "description": "An optional memory reference to where the breakpoint is set." + }, + "offset": { + "type": "integer", + "description": "An optional offset from the instruction reference.\nThis can be negative." 
+ } + }, + "required": [ "verified" ] + }, + + "SteppingGranularity": { + "type": "string", + "description": "The granularity of one 'step' in the stepping requests 'next', 'stepIn', 'stepOut', and 'stepBack'.", + "enum": [ "statement", "line", "instruction" ], + "enumDescriptions": [ + "The step should allow the program to run until the current statement has finished executing.\nThe meaning of a statement is determined by the adapter and it may be considered equivalent to a line.\nFor example 'for(int i = 0; i < 10; i++) could be considered to have 3 statements 'int i = 0', 'i < 10', and 'i++'.", + "The step should allow the program to run until the current source line has executed.", + "The step should allow one instruction to execute (e.g. one x86 instruction)." + ] + }, + + "StepInTarget": { + "type": "object", + "description": "A StepInTarget can be used in the 'stepIn' request and determines into which single target the stepIn request should step.", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for a stepIn target." + }, + "label": { + "type": "string", + "description": "The name of the stepIn target (shown in the UI)." + } + }, + "required": [ "id", "label" ] + }, + + "GotoTarget": { + "type": "object", + "description": "A GotoTarget describes a code location that can be used as a target in the 'goto' request.\nThe possible goto targets can be determined via the 'gotoTargets' request.", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for a goto target. This is used in the goto request." + }, + "label": { + "type": "string", + "description": "The name of the goto target (shown in the UI)." + }, + "line": { + "type": "integer", + "description": "The line of the goto target." + }, + "column": { + "type": "integer", + "description": "An optional column of the goto target." + }, + "endLine": { + "type": "integer", + "description": "An optional end line of the range covered by the goto target." + }, + "endColumn": { + "type": "integer", + "description": "An optional end column of the range covered by the goto target." + }, + "instructionPointerReference": { + "type": "string", + "description": "Optional memory reference for the instruction pointer value represented by this target." + } + }, + "required": [ "id", "label", "line" ] + }, + + "CompletionItem": { + "type": "object", + "description": "CompletionItems are the suggestions returned from the CompletionsRequest.", + "properties": { + "label": { + "type": "string", + "description": "The label of this completion item. By default this is also the text that is inserted when selecting this completion." + }, + "text": { + "type": "string", + "description": "If text is not falsy then it is inserted instead of the label." + }, + "sortText": { + "type": "string", + "description": "A string that should be used when comparing this item with other items. When `falsy` the label is used." + }, + "type": { + "$ref": "#/definitions/CompletionItemType", + "description": "The item's type. Typically the client uses this information to render the item in the UI with an icon." + }, + "start": { + "type": "integer", + "description": "This value determines the location (in the CompletionsRequest's 'text' attribute) where the completion text is added.\nIf missing the text is added at the location specified by the CompletionsRequest's 'column' attribute." 
+ }, + "length": { + "type": "integer", + "description": "This value determines how many characters are overwritten by the completion text.\nIf missing the value 0 is assumed which results in the completion text being inserted." + }, + "selectionStart": { + "type": "integer", + "description": "Determines the start of the new selection after the text has been inserted (or replaced).\nThe start position must be in the range 0 to the length of the completion text.\nIf omitted the selection starts at the end of the completion text." + }, + "selectionLength": { + "type": "integer", + "description": "Determines the length of the new selection after the text has been inserted (or replaced).\nThe selection can not extend beyond the bounds of the completion text.\nIf omitted the length is assumed to be 0." + } + }, + "required": [ "label" ] + }, + + "CompletionItemType": { + "type": "string", + "description": "Some predefined types for the CompletionItem. Please note that not all clients have specific icons for all of them.", + "enum": [ "method", "function", "constructor", "field", "variable", "class", "interface", "module", "property", "unit", "value", "enum", "keyword", "snippet", "text", "color", "file", "reference", "customcolor" ] + }, + + "ChecksumAlgorithm": { + "type": "string", + "description": "Names of checksum algorithms that may be supported by a debug adapter.", + "enum": [ "MD5", "SHA1", "SHA256", "timestamp" ] + }, + + "Checksum": { + "type": "object", + "description": "The checksum of an item calculated by the specified algorithm.", + "properties": { + "algorithm": { + "$ref": "#/definitions/ChecksumAlgorithm", + "description": "The algorithm used to calculate this checksum." + }, + "checksum": { + "type": "string", + "description": "Value of the checksum." + } + }, + "required": [ "algorithm", "checksum" ] + }, + + "ValueFormat": { + "type": "object", + "description": "Provides formatting information for a value.", + "properties": { + "hex": { + "type": "boolean", + "description": "Display the value in hex." + } + } + }, + + "StackFrameFormat": { + "allOf": [ { "$ref": "#/definitions/ValueFormat" }, { + "type": "object", + "description": "Provides formatting information for a stack frame.", + "properties": { + "parameters": { + "type": "boolean", + "description": "Displays parameters for the stack frame." + }, + "parameterTypes": { + "type": "boolean", + "description": "Displays the types of parameters for the stack frame." + }, + "parameterNames": { + "type": "boolean", + "description": "Displays the names of parameters for the stack frame." + }, + "parameterValues": { + "type": "boolean", + "description": "Displays the values of parameters for the stack frame." + }, + "line": { + "type": "boolean", + "description": "Displays the line number of the stack frame." + }, + "module": { + "type": "boolean", + "description": "Displays the module of the stack frame." + }, + "includeAll": { + "type": "boolean", + "description": "Includes all stack frames, including those the debug adapter might otherwise hide." + } + } + }] + }, + + "ExceptionFilterOptions": { + "type": "object", + "description": "An ExceptionFilterOptions is used to specify an exception filter together with a condition for the setExceptionsFilter request.", + "properties": { + "filterId": { + "type": "string", + "description": "ID of an exception filter returned by the 'exceptionBreakpointFilters' capability."
+ }, + "condition": { + "type": "string", + "description": "An optional expression for conditional exceptions.\nThe exception will break into the debugger if the result of the condition is true." + } + }, + "required": [ "filterId" ] + }, + + "ExceptionOptions": { + "type": "object", + "description": "An ExceptionOptions assigns configuration options to a set of exceptions.", + "properties": { + "path": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionPathSegment" + }, + "description": "A path that selects a single or multiple exceptions in a tree. If 'path' is missing, the whole tree is selected.\nBy convention the first segment of the path is a category that is used to group exceptions in the UI." + }, + "breakMode": { + "$ref": "#/definitions/ExceptionBreakMode", + "description": "Condition when a thrown exception should result in a break." + } + }, + "required": [ "breakMode" ] + }, + + "ExceptionBreakMode": { + "type": "string", + "description": "This enumeration defines all possible conditions when a thrown exception should result in a break.\nnever: never breaks,\nalways: always breaks,\nunhandled: breaks when exception unhandled,\nuserUnhandled: breaks if the exception is not handled by user code.", + "enum": [ "never", "always", "unhandled", "userUnhandled" ] + }, + + "ExceptionPathSegment": { + "type": "object", + "description": "An ExceptionPathSegment represents a segment in a path that is used to match leafs or nodes in a tree of exceptions.\nIf a segment consists of more than one name, it matches the names provided if 'negate' is false or missing or\nit matches anything except the names provided if 'negate' is true.", + "properties": { + "negate": { + "type": "boolean", + "description": "If false or missing this segment matches the names provided, otherwise it matches anything except the names provided." + }, + "names": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Depending on the value of 'negate' the names that should match or not match." + } + }, + "required": [ "names" ] + }, + + "ExceptionDetails": { + "type": "object", + "description": "Detailed information about an exception that has occurred.", + "properties": { + "message": { + "type": "string", + "description": "Message contained in the exception." + }, + "typeName": { + "type": "string", + "description": "Short type name of the exception object." + }, + "fullTypeName": { + "type": "string", + "description": "Fully-qualified type name of the exception object." + }, + "evaluateName": { + "type": "string", + "description": "Optional expression that can be evaluated in the current scope to obtain the exception object." + }, + "stackTrace": { + "type": "string", + "description": "Stack trace at the time the exception was thrown." + }, + "innerException": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionDetails" + }, + "description": "Details of the exception contained by this exception, if any." + } + } + }, + + "DisassembledInstruction": { + "type": "object", + "description": "Represents a single disassembled instruction.", + "properties": { + "address": { + "type": "string", + "description": "The address of the instruction. Treated as a hex value if prefixed with '0x', or as a decimal value otherwise." + }, + "instructionBytes": { + "type": "string", + "description": "Optional raw bytes representing the instruction and its operands, in an implementation-defined format." 
+ }, + "instruction": { + "type": "string", + "description": "Text representing the instruction and its operands, in an implementation-defined format." + }, + "symbol": { + "type": "string", + "description": "Name of the symbol that corresponds with the location of this instruction, if any." + }, + "location": { + "$ref": "#/definitions/Source", + "description": "Source location that corresponds to this instruction, if any.\nShould always be set (if available) on the first instruction returned,\nbut can be omitted afterwards if this instruction maps to the same source file as the previous instruction." + }, + "line": { + "type": "integer", + "description": "The line within the source location that corresponds to this instruction, if any." + }, + "column": { + "type": "integer", + "description": "The column within the line that corresponds to this instruction, if any." + }, + "endLine": { + "type": "integer", + "description": "The end line of the range that corresponds to this instruction, if any." + }, + "endColumn": { + "type": "integer", + "description": "The end column of the range that corresponds to this instruction, if any." + } + }, + "required": [ "address", "instruction" ] + }, + + "InvalidatedAreas": { + "type": "string", + "description": "Logical areas that can be invalidated by the 'invalidated' event.", + "_enum": [ "all", "stacks", "threads", "variables" ], + "enumDescriptions": [ + "All previously fetched data has become invalid and needs to be refetched.", + "Previously fetched stack related data has become invalid and needs to be refetched.", + "Previously fetched thread related data has become invalid and needs to be refetched.", + "Previously fetched variable data has become invalid and needs to be refetched." + ] + } + + } +} diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocolCustom.json b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocolCustom.json new file mode 120000 index 0000000..aded81a --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocolCustom.json @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/36/62/8c/fdb379b3aa3350f5c815cfd0df0eae9e4e997f585bd698328efc6fe103 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_base_schema.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_base_schema.py new file mode 120000 index 0000000..950538c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_base_schema.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/29/ed/85/7fff1244a1920da984d1e671c7cbc060dc7829af6826a466357e437d00 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py new file mode 120000 index 0000000..7c07565 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/56/20/2c/71e79b32c378644f05de7c2f71937fd0006bf2090a757125fd3e4116c3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema_log.py 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema_log.py new file mode 120000 index 0000000..a0b2f7b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema_log.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/68/c2/02/ac1ccb5826b29d9cb64f2b04bf3dffb1d1d77eb4251411c934a97b8fb9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevconsole_code.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevconsole_code.py new file mode 100644 index 0000000..e6ba300 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevconsole_code.py @@ -0,0 +1,554 @@ +""" +A copy of the code module in the standard library with some changes to work with +async evaluation. + +Utilities needed to emulate Python's interactive interpreter. +""" + +# Inspired by similar code by Jeff Epler and Fredrik Lundh. + +import sys +import traceback +import inspect + +# START --------------------------- from codeop import CommandCompiler, compile_command +# START --------------------------- from codeop import CommandCompiler, compile_command +# START --------------------------- from codeop import CommandCompiler, compile_command +# START --------------------------- from codeop import CommandCompiler, compile_command +# START --------------------------- from codeop import CommandCompiler, compile_command +r"""Utilities to compile possibly incomplete Python source code. + +This module provides two interfaces, broadly similar to the builtin +function compile(), which take program text, a filename and a 'mode' +and: + +- Return code object if the command is complete and valid +- Return None if the command is incomplete +- Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + +Approach: + +First, check if the source consists entirely of blank lines and +comments; if so, replace it with 'pass', because the built-in +parser doesn't always do the right thing for these. + +Compile three times: as is, with \n, and with \n\n appended. If it +compiles as is, it's complete. If it compiles with one \n appended, +we expect more. If it doesn't compile either way, we compare the +error we get when compiling with \n or \n\n appended. If the errors +are the same, the code is broken. But if the errors are different, we +expect more. Not intuitive; not even guaranteed to hold in future +releases; but this matches the compiler's behavior from Python 1.4 +through 2.2, at least. + +Caveat: + +It is possible (but not likely) that the parser stops parsing with a +successful outcome before reaching the end of the source; in this +case, trailing symbols may be ignored instead of causing an error. +For example, a backslash followed by two newlines may be followed by +arbitrary garbage. This will be fixed once the API for the parser is +better. + +The two interfaces are: + +compile_command(source, filename, symbol): + + Compiles a single command in the manner described above. + +CommandCompiler(): + + Instances of this class have __call__ methods identical in + signature to compile_command; the difference is that if the + instance compiles program text containing a __future__ statement, + the instance 'remembers' and compiles all subsequent program texts + with the statement in force. 
+ +The module also provides another class: + +Compile(): + + Instances of this class act like the built-in function compile, + but with 'memory' in the sense described above. +""" + +import __future__ + +_features = [getattr(__future__, fname) + for fname in __future__.all_feature_names] + +__all__ = ["compile_command", "Compile", "CommandCompiler"] + +PyCF_DONT_IMPLY_DEDENT = 0x200 # Matches pythonrun.h + + +def _maybe_compile(compiler, source, filename, symbol): + # Check for source consisting of only blank lines and comments + for line in source.split("\n"): + line = line.strip() + if line and line[0] != '#': + break # Leave it alone + else: + if symbol != "eval": + source = "pass" # Replace it with a 'pass' statement + + err = err1 = err2 = None + code = code1 = code2 = None + + try: + code = compiler(source, filename, symbol) + except SyntaxError as err: + pass + + try: + code1 = compiler(source + "\n", filename, symbol) + except SyntaxError as e: + err1 = e + + try: + code2 = compiler(source + "\n\n", filename, symbol) + except SyntaxError as e: + err2 = e + + try: + if code: + return code + if not code1 and repr(err1) == repr(err2): + raise err1 + finally: + err1 = err2 = None + + +def _compile(source, filename, symbol): + return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT) + + +def compile_command(source, filename="<input>", symbol="single"): + r"""Compile a command and determine whether it is incomplete. + + Arguments: + + source -- the source string; may contain \n characters + filename -- optional filename from which source was read; default + "<input>" + symbol -- optional grammar start symbol; "single" (default) or "eval" + + Return value / exceptions raised: + + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + """ + return _maybe_compile(_compile, source, filename, symbol) + + +class Compile: + """Instances of this class behave much like the built-in compile + function, but if one is used to compile text containing a future + statement, it "remembers" and compiles all subsequent program texts + with the statement in force.""" + + def __init__(self): + self.flags = PyCF_DONT_IMPLY_DEDENT + + try: + from ast import PyCF_ALLOW_TOP_LEVEL_AWAIT + self.flags |= PyCF_ALLOW_TOP_LEVEL_AWAIT + except: + pass + + def __call__(self, source, filename, symbol): + codeob = compile(source, filename, symbol, self.flags, 1) + for feature in _features: + if codeob.co_flags & feature.compiler_flag: + self.flags |= feature.compiler_flag + return codeob + + +class CommandCompiler: + """Instances of this class have __call__ methods identical in + signature to compile_command; the difference is that if the + instance compiles program text containing a __future__ statement, + the instance 'remembers' and compiles all subsequent program texts + with the statement in force.""" + + def __init__(self,): + self.compiler = Compile() + + def __call__(self, source, filename="<input>", symbol="single"): + r"""Compile a command and determine whether it is incomplete.
+ + Arguments: + + source -- the source string; may contain \n characters + filename -- optional filename from which source was read; + default "<input>" + symbol -- optional grammar start symbol; "single" (default) or + "eval" + + Return value / exceptions raised: + + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + """ + return _maybe_compile(self.compiler, source, filename, symbol) + +# END --------------------------- from codeop import CommandCompiler, compile_command +# END --------------------------- from codeop import CommandCompiler, compile_command +# END --------------------------- from codeop import CommandCompiler, compile_command +# END --------------------------- from codeop import CommandCompiler, compile_command +# END --------------------------- from codeop import CommandCompiler, compile_command + + +__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", + "compile_command"] + +from _pydev_bundle._pydev_saved_modules import threading + + +class _EvalAwaitInNewEventLoop(threading.Thread): + + def __init__(self, compiled, updated_globals, updated_locals): + threading.Thread.__init__(self) + self.daemon = True + self._compiled = compiled + self._updated_globals = updated_globals + self._updated_locals = updated_locals + + # Output + self.evaluated_value = None + self.exc = None + + async def _async_func(self): + return await eval(self._compiled, self._updated_locals, self._updated_globals) + + def run(self): + try: + import asyncio + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + self.evaluated_value = asyncio.run(self._async_func()) + except: + self.exc = sys.exc_info() + + +class InteractiveInterpreter: + """Base class for InteractiveConsole. + + This class deals with parsing and interpreter state (the user's + namespace); it doesn't deal with input buffering or prompting or + input file naming (the filename is always passed in explicitly). + + """ + + def __init__(self, locals=None): + """Constructor. + + The optional 'locals' argument specifies the dictionary in + which code will be executed; it defaults to a newly created + dictionary with key "__name__" set to "__console__" and key + "__doc__" set to None. + + """ + if locals is None: + locals = {"__name__": "__console__", "__doc__": None} + self.locals = locals + self.compile = CommandCompiler() + + def runsource(self, source, filename="<input>", symbol="single"): + """Compile and run some source in the interpreter. + + Arguments are as for compile_command(). + + One of several things can happen: + + 1) The input is incorrect; compile_command() raised an + exception (SyntaxError or OverflowError). A syntax traceback + will be printed by calling the showsyntaxerror() method. + + 2) The input is incomplete, and more input is required; + compile_command() returned None. Nothing happens. + + 3) The input is complete; compile_command() returned a code + object. The code is executed by calling self.runcode() (which + also handles run-time exceptions, except for SystemExit). + + The return value is True in case 2, False in the other cases (unless + an exception is raised). The return value can be used to + decide whether to use sys.ps1 or sys.ps2 to prompt the next + line.
+ + """ + try: + code = self.compile(source, filename, symbol) + except (OverflowError, SyntaxError, ValueError): + # Case 1 + self.showsyntaxerror(filename) + return False + + if code is None: + # Case 2 + return True + + # Case 3 + self.runcode(code) + return False + + def runcode(self, code): + """Execute a code object. + + When an exception occurs, self.showtraceback() is called to + display a traceback. All exceptions are caught except + SystemExit, which is reraised. + + A note about KeyboardInterrupt: this exception may occur + elsewhere in this code, and may not always be caught. The + caller should be prepared to deal with it. + + """ + try: + is_async = False + if hasattr(inspect, 'CO_COROUTINE'): + is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE + + if is_async: + t = _EvalAwaitInNewEventLoop(code, self.locals, None) + t.start() + t.join() + + if t.exc: + raise t.exc[1].with_traceback(t.exc[2]) + + else: + exec(code, self.locals) + except SystemExit: + raise + except: + self.showtraceback() + + def showsyntaxerror(self, filename=None): + """Display the syntax error that just occurred. + + This doesn't display a stack trace because there isn't one. + + If a filename is given, it is stuffed in the exception instead + of what was there before (because Python's parser always uses + "" when reading from a string). + + The output is written by self.write(), below. + + """ + type, value, tb = sys.exc_info() + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + if filename and type is SyntaxError: + # Work hard to stuff the correct filename in the exception + try: + msg, (dummy_filename, lineno, offset, line) = value.args + except ValueError: + # Not the format we expect; leave it alone + pass + else: + # Stuff in the right filename + value = SyntaxError(msg, (filename, lineno, offset, line)) + sys.last_value = value + if sys.excepthook is sys.__excepthook__: + lines = traceback.format_exception_only(type, value) + self.write(''.join(lines)) + else: + # If someone has set sys.excepthook, we let that take precedence + # over self.write + sys.excepthook(type, value, tb) + + def showtraceback(self): + """Display the exception that just occurred. + + We remove the first stack item because it is our own code. + + The output is written by self.write(), below. + + """ + sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() + sys.last_traceback = last_tb + try: + lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next) + if sys.excepthook is sys.__excepthook__: + self.write(''.join(lines)) + else: + # If someone has set sys.excepthook, we let that take precedence + # over self.write + sys.excepthook(ei[0], ei[1], last_tb) + finally: + last_tb = ei = None + + def write(self, data): + """Write a string. + + The base implementation writes to sys.stderr; a subclass may + replace this with a different implementation. + + """ + sys.stderr.write(data) + + +class InteractiveConsole(InteractiveInterpreter): + """Closely emulate the behavior of the interactive Python interpreter. + + This class builds on InteractiveInterpreter and adds prompting + using the familiar sys.ps1 and sys.ps2, and input buffering. + + """ + + def __init__(self, locals=None, filename=""): + """Constructor. + + The optional locals argument will be passed to the + InteractiveInterpreter base class. + + The optional filename argument should specify the (file)name + of the input stream; it will show up in tracebacks. 
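As a hedged illustration of the three runsource() outcomes described above, using the standard-library code module rather than this vendored copy:

    import code

    interp = code.InteractiveInterpreter()
    print(interp.runsource("x = 40 + 2"))   # False: input complete, code object executed
    print(interp.runsource("def f():"))     # True: input incomplete, prompt with sys.ps2
    interp.runsource("print(x)")            # runs in the interpreter's namespace, prints 42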
+ + """ + InteractiveInterpreter.__init__(self, locals) + self.filename = filename + self.resetbuffer() + + def resetbuffer(self): + """Reset the input buffer.""" + self.buffer = [] + + def interact(self, banner=None, exitmsg=None): + """Closely emulate the interactive Python console. + + The optional banner argument specifies the banner to print + before the first interaction; by default it prints a banner + similar to the one printed by the real Python interpreter, + followed by the current class name in parentheses (so as not + to confuse this with the real interpreter -- since it's so + close!). + + The optional exitmsg argument specifies the exit message + printed when exiting. Pass the empty string to suppress + printing an exit message. If exitmsg is not given or None, + a default message is printed. + + """ + try: + sys.ps1 + except AttributeError: + sys.ps1 = ">>> " + try: + sys.ps2 + except AttributeError: + sys.ps2 = "... " + cprt = 'Type "help", "copyright", "credits" or "license" for more information.' + if banner is None: + self.write("Python %s on %s\n%s\n(%s)\n" % + (sys.version, sys.platform, cprt, + self.__class__.__name__)) + elif banner: + self.write("%s\n" % str(banner)) + more = 0 + while 1: + try: + if more: + prompt = sys.ps2 + else: + prompt = sys.ps1 + try: + line = self.raw_input(prompt) + except EOFError: + self.write("\n") + break + else: + more = self.push(line) + except KeyboardInterrupt: + self.write("\nKeyboardInterrupt\n") + self.resetbuffer() + more = 0 + if exitmsg is None: + self.write('now exiting %s...\n' % self.__class__.__name__) + elif exitmsg != '': + self.write('%s\n' % exitmsg) + + def push(self, line): + """Push a line to the interpreter. + + The line should not have a trailing newline; it may have + internal newlines. The line is appended to a buffer and the + interpreter's runsource() method is called with the + concatenated contents of the buffer as source. If this + indicates that the command was executed or invalid, the buffer + is reset; otherwise, the command is incomplete, and the buffer + is left as it was after the line was appended. The return + value is 1 if more input is required, 0 if the line was dealt + with in some way (this is the same as runsource()). + + """ + self.buffer.append(line) + source = "\n".join(self.buffer) + more = self.runsource(source, self.filename) + if not more: + self.resetbuffer() + return more + + def raw_input(self, prompt=""): + """Write a prompt and read a line. + + The returned line does not include the trailing newline. + When the user enters the EOF key sequence, EOFError is raised. + + The base implementation uses the built-in function + input(); a subclass may replace this with a different + implementation. + + """ + return input(prompt) + + +def interact(banner=None, readfunc=None, local=None, exitmsg=None): + """Closely emulate the interactive Python interpreter. + + This is a backwards compatible interface to the InteractiveConsole + class. When readfunc is not specified, it attempts to import the + readline module to enable GNU readline if it is available. 
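A short sketch of the push()/buffer behaviour described above, again against the standard-library code module and with made-up input lines:

    import code

    console = code.InteractiveConsole()
    print(console.push("if True:"))          # truthy: the block is still open
    print(console.push("    answer = 42"))   # still open, the line stays in the buffer
    print(console.push(""))                  # blank line completes it; the block runs, returns falsy
    console.push("print(answer)")            # prints 42 from the console's namespace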
+ + Arguments (all optional, all default to None): + + banner -- passed to InteractiveConsole.interact() + readfunc -- if not None, replaces InteractiveConsole.raw_input() + local -- passed to InteractiveInterpreter.__init__() + exitmsg -- passed to InteractiveConsole.interact() + + """ + console = InteractiveConsole(local) + if readfunc is not None: + console.raw_input = readfunc + else: + try: + import readline + except ImportError: + pass + console.interact(banner, exitmsg) + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument('-q', action='store_true', + help="don't print version and copyright messages") + args = parser.parse_args() + if args.q or sys.flags.quiet: + banner = '' + else: + banner = None + interact(banner) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py new file mode 120000 index 0000000..8fb5b3f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/14/fe/7b/a26cf82b68ab86e7f9ce5452b46a052e096cc3d99cc124be8ac2754bc2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py new file mode 100644 index 0000000..2ed1313 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py @@ -0,0 +1,153 @@ +from _pydevd_bundle.pydevd_constants import (STATE_RUN, PYTHON_SUSPEND, SUPPORT_GEVENT, ForkSafeLock, + _current_frames) +from _pydev_bundle import pydev_log +# IFDEF CYTHON +# pydev_log.debug("Using Cython speedups") +# ELSE +from _pydevd_bundle.pydevd_frame import PyDBFrame +# ENDIF + +version = 11 + + +#======================================================================================================================= +# PyDBAdditionalThreadInfo +#======================================================================================================================= +# IFDEF CYTHON +# cdef class PyDBAdditionalThreadInfo: +# ELSE +class PyDBAdditionalThreadInfo(object): +# ENDIF + + # Note: the params in cython are declared in pydevd_cython.pxd. + # IFDEF CYTHON + # ELSE + __slots__ = [ + 'pydev_state', + 'pydev_step_stop', + 'pydev_original_step_cmd', + 'pydev_step_cmd', + 'pydev_notify_kill', + 'pydev_django_resolve_frame', + 'pydev_call_from_jinja2', + 'pydev_call_inside_jinja2', + 'is_tracing', + 'conditional_breakpoint_exception', + 'pydev_message', + 'suspend_type', + 'pydev_next_line', + 'pydev_func_name', + 'suspended_at_unhandled', + 'trace_suspend_type', + 'top_level_thread_tracer_no_back_frames', + 'top_level_thread_tracer_unhandled', + 'thread_tracer', + 'step_in_initial_location', + + # Used for CMD_SMART_STEP_INTO (to know which smart step into variant to use) + 'pydev_smart_parent_offset', + 'pydev_smart_child_offset', + + # Used for CMD_SMART_STEP_INTO (list[_pydevd_bundle.pydevd_bytecode_utils.Variant]) + # Filled when the cmd_get_smart_step_into_variants is requested (so, this is a copy + # of the last request for a given thread and pydev_smart_parent_offset/pydev_smart_child_offset relies on it). 
+ 'pydev_smart_step_into_variants', + 'target_id_to_smart_step_into_variant', + + 'pydev_use_scoped_step_frame', + ] + # ENDIF + + def __init__(self): + self.pydev_state = STATE_RUN # STATE_RUN or STATE_SUSPEND + self.pydev_step_stop = None + + # Note: we have `pydev_original_step_cmd` and `pydev_step_cmd` because the original is to + # say the action that started it and the other is to say what's the current tracing behavior + # (because it's possible that we start with a step over but may have to switch to a + # different step strategy -- for instance, if a step over is done and we return the current + # method the strategy is changed to a step in). + + self.pydev_original_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + + self.pydev_notify_kill = False + self.pydev_django_resolve_frame = False + self.pydev_call_from_jinja2 = None + self.pydev_call_inside_jinja2 = None + self.is_tracing = 0 + self.conditional_breakpoint_exception = None + self.pydev_message = '' + self.suspend_type = PYTHON_SUSPEND + self.pydev_next_line = -1 + self.pydev_func_name = '.invalid.' # Must match the type in cython + self.suspended_at_unhandled = False + self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' + self.top_level_thread_tracer_no_back_frames = [] + self.top_level_thread_tracer_unhandled = None + self.thread_tracer = None + self.step_in_initial_location = None + self.pydev_smart_parent_offset = -1 + self.pydev_smart_child_offset = -1 + self.pydev_smart_step_into_variants = () + self.target_id_to_smart_step_into_variant = {} + + # Flag to indicate ipython use-case where each line will be executed as a call/line/return + # in a new new frame but in practice we want to consider each new frame as if it was all + # part of the same frame. + # + # In practice this means that a step over shouldn't revert to a step in and we need some + # special logic to know when we should stop in a step over as we need to consider 2 + # different frames as being equal if they're logically the continuation of a frame + # being executed by ipython line by line. + # + # See: https://github.com/microsoft/debugpy/issues/869#issuecomment-1132141003 + self.pydev_use_scoped_step_frame = False + + def get_topmost_frame(self, thread): + ''' + Gets the topmost frame for the given thread. Note that it may be None + and callers should remove the reference to the frame as soon as possible + to avoid disturbing user code. + ''' + # sys._current_frames(): dictionary with thread id -> topmost frame + current_frames = _current_frames() + topmost_frame = current_frames.get(thread.ident) + if topmost_frame is None: + # Note: this is expected for dummy threads (so, getting the topmost frame should be + # treated as optional). 
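get_topmost_frame() is essentially a lookup into sys._current_frames(); a minimal stand-alone sketch of that lookup, with a hypothetical worker thread:

    import sys, threading, time

    t = threading.Thread(target=lambda: time.sleep(0.5))
    t.start()
    frame = sys._current_frames().get(t.ident)   # topmost frame of that thread, or None
    print(frame.f_code.co_name if frame else "no frame (dummy or finished thread)")
    t.join()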
+ pydev_log.info( + 'Unable to get topmost frame for thread: %s, thread.ident: %s, id(thread): %s\nCurrent frames: %s.\n' + 'GEVENT_SUPPORT: %s', + thread, + thread.ident, + id(thread), + current_frames, + SUPPORT_GEVENT, + ) + + return topmost_frame + + def __str__(self): + return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + + +_set_additional_thread_info_lock = ForkSafeLock() + + +def set_additional_thread_info(thread): + try: + additional_info = thread.additional_info + if additional_info is None: + raise AttributeError() + except: + with _set_additional_thread_info_lock: + # If it's not there, set it within a lock to avoid any racing + # conditions. + additional_info = getattr(thread, 'additional_info', None) + if additional_info is None: + additional_info = PyDBAdditionalThreadInfo() + thread.additional_info = additional_info + + return additional_info diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_api.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_api.py new file mode 100644 index 0000000..28a28d6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_api.py @@ -0,0 +1,1141 @@ +import sys +import bisect +import types + +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle import pydevd_utils, pydevd_source_mapping +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info +from _pydevd_bundle.pydevd_comm import (InternalGetThreadStack, internal_get_completions, + InternalSetNextStatementThread, internal_reload_code, + InternalGetVariable, InternalGetArray, InternalLoadFullValue, + internal_get_description, internal_get_frame, internal_evaluate_expression, InternalConsoleExec, + internal_get_variable_json, internal_change_variable, internal_change_variable_json, + internal_evaluate_expression_json, internal_set_expression_json, internal_get_exception_details_json, + internal_step_in_thread, internal_smart_step_into) +from _pydevd_bundle.pydevd_comm_constants import (CMD_THREAD_SUSPEND, file_system_encoding, + CMD_STEP_INTO_MY_CODE, CMD_STOP_ON_START, CMD_SMART_STEP_INTO) +from _pydevd_bundle.pydevd_constants import (get_current_thread_id, set_protocol, get_protocol, + HTTP_JSON_PROTOCOL, JSON_PROTOCOL, DebugInfoHolder, IS_WINDOWS) +from _pydevd_bundle.pydevd_net_command_factory_json import NetCommandFactoryJson +from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory +import pydevd_file_utils +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint +from pydevd_tracing import get_exception_traceback_str +import os +import subprocess +import ctypes +from _pydevd_bundle.pydevd_collect_bytecode_info import code_to_bytecode_representation +import itertools +import linecache +from _pydevd_bundle.pydevd_utils import DAPGrouper +from _pydevd_bundle.pydevd_daemon_thread import run_as_pydevd_daemon_thread +from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id, resume_threads +import tokenize + +try: + import dis +except ImportError: + + def _get_code_lines(code): + raise NotImplementedError + +else: + + def _get_code_lines(code): + if not isinstance(code, types.CodeType): + path = code + with tokenize.open(path) as f: + src = f.read() + code = compile(src, path, 'exec', 0, dont_inherit=True) + return _get_code_lines(code) + + def iterate(): + # First, 
get all line starts for this code object. This does not include + # bodies of nested class and function definitions, as they have their + # own objects. + for _, lineno in dis.findlinestarts(code): + yield lineno + + # For nested class and function definitions, their respective code objects + # are constants referenced by this object. + for const in code.co_consts: + if isinstance(const, types.CodeType) and const.co_filename == code.co_filename: + for lineno in _get_code_lines(const): + yield lineno + + return iterate() + + +class PyDevdAPI(object): + + class VariablePresentation(object): + + def __init__(self, special='group', function='group', class_='group', protected='inline'): + self._presentation = { + DAPGrouper.SCOPE_SPECIAL_VARS: special, + DAPGrouper.SCOPE_FUNCTION_VARS: function, + DAPGrouper.SCOPE_CLASS_VARS: class_, + DAPGrouper.SCOPE_PROTECTED_VARS: protected, + } + + def get_presentation(self, scope): + return self._presentation[scope] + + def run(self, py_db): + py_db.ready_to_run = True + + def notify_initialize(self, py_db): + py_db.on_initialize() + + def notify_configuration_done(self, py_db): + py_db.on_configuration_done() + + def notify_disconnect(self, py_db): + py_db.on_disconnect() + + def set_protocol(self, py_db, seq, protocol): + set_protocol(protocol.strip()) + if get_protocol() in (HTTP_JSON_PROTOCOL, JSON_PROTOCOL): + cmd_factory_class = NetCommandFactoryJson + else: + cmd_factory_class = NetCommandFactory + + if not isinstance(py_db.cmd_factory, cmd_factory_class): + py_db.cmd_factory = cmd_factory_class() + + return py_db.cmd_factory.make_protocol_set_message(seq) + + def set_ide_os_and_breakpoints_by(self, py_db, seq, ide_os, breakpoints_by): + ''' + :param ide_os: 'WINDOWS' or 'UNIX' + :param breakpoints_by: 'ID' or 'LINE' + ''' + if breakpoints_by == 'ID': + py_db._set_breakpoints_with_id = True + else: + py_db._set_breakpoints_with_id = False + + self.set_ide_os(ide_os) + + return py_db.cmd_factory.make_version_message(seq) + + def set_ide_os(self, ide_os): + ''' + :param ide_os: 'WINDOWS' or 'UNIX' + ''' + pydevd_file_utils.set_ide_os(ide_os) + + def set_gui_event_loop(self, py_db, gui_event_loop): + py_db._gui_event_loop = gui_event_loop + + def send_error_message(self, py_db, msg): + cmd = py_db.cmd_factory.make_warning_message('pydevd: %s\n' % (msg,)) + py_db.writer.add_command(cmd) + + def set_show_return_values(self, py_db, show_return_values): + if show_return_values: + py_db.show_return_values = True + else: + if py_db.show_return_values: + # We should remove saved return values + py_db.remove_return_values_flag = True + py_db.show_return_values = False + pydev_log.debug("Show return values: %s", py_db.show_return_values) + + def list_threads(self, py_db, seq): + # Response is the command with the list of threads to be added to the writer thread. + return py_db.cmd_factory.make_list_threads_message(py_db, seq) + + def request_suspend_thread(self, py_db, thread_id='*'): + # Yes, thread suspend is done at this point, not through an internal command. 
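The _get_code_lines() helper defined earlier walks dis.findlinestarts() recursively through nested code objects; a rough stand-alone version of that traversal (names here are illustrative only):

    import dis, types

    def line_starts(code_obj):
        lines = {ln for _, ln in dis.findlinestarts(code_obj) if ln is not None}
        for const in code_obj.co_consts:
            if isinstance(const, types.CodeType):
                lines |= line_starts(const)
        return lines

    src = "x = 1\n\ndef f():\n    return x\n"
    print(sorted(line_starts(compile(src, "<demo>", "exec"))))   # lines that can carry a breakpoint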
+ threads = [] + suspend_all = thread_id.strip() == '*' + if suspend_all: + threads = pydevd_utils.get_non_pydevd_threads() + + elif thread_id.startswith('__frame__:'): + sys.stderr.write("Can't suspend tasklet: %s\n" % (thread_id,)) + + else: + threads = [pydevd_find_thread_by_id(thread_id)] + + for t in threads: + if t is None: + continue + py_db.set_suspend( + t, + CMD_THREAD_SUSPEND, + suspend_other_threads=suspend_all, + is_pause=True, + ) + # Break here (even if it's suspend all) as py_db.set_suspend will + # take care of suspending other threads. + break + + def set_enable_thread_notifications(self, py_db, enable): + ''' + When disabled, no thread notifications (for creation/removal) will be + issued until it's re-enabled. + + Note that when it's re-enabled, a creation notification will be sent for + all existing threads even if it was previously sent (this is meant to + be used on disconnect/reconnect). + ''' + py_db.set_enable_thread_notifications(enable) + + def request_disconnect(self, py_db, resume_threads): + self.set_enable_thread_notifications(py_db, False) + self.remove_all_breakpoints(py_db, '*') + self.remove_all_exception_breakpoints(py_db) + self.notify_disconnect(py_db) + + if resume_threads: + self.request_resume_thread(thread_id='*') + + def request_resume_thread(self, thread_id): + resume_threads(thread_id) + + def request_completions(self, py_db, seq, thread_id, frame_id, act_tok, line=-1, column=-1): + py_db.post_method_as_internal_command( + thread_id, internal_get_completions, seq, thread_id, frame_id, act_tok, line=line, column=column) + + def request_stack(self, py_db, seq, thread_id, fmt=None, timeout=.5, start_frame=0, levels=0): + # If it's already suspended, get it right away. + internal_get_thread_stack = InternalGetThreadStack( + seq, thread_id, py_db, set_additional_thread_info, fmt=fmt, timeout=timeout, start_frame=start_frame, levels=levels) + if internal_get_thread_stack.can_be_executed_by(get_current_thread_id(threading.current_thread())): + internal_get_thread_stack.do_it(py_db) + else: + py_db.post_internal_command(internal_get_thread_stack, '*') + + def request_exception_info_json(self, py_db, request, thread_id, max_frames): + py_db.post_method_as_internal_command( + thread_id, + internal_get_exception_details_json, + request, + thread_id, + max_frames, + set_additional_thread_info=set_additional_thread_info, + iter_visible_frames_info=py_db.cmd_factory._iter_visible_frames_info, + ) + + def request_step(self, py_db, thread_id, step_cmd_id): + t = pydevd_find_thread_by_id(thread_id) + if t: + py_db.post_method_as_internal_command( + thread_id, + internal_step_in_thread, + thread_id, + step_cmd_id, + set_additional_thread_info=set_additional_thread_info, + ) + elif thread_id.startswith('__frame__:'): + sys.stderr.write("Can't make tasklet step command: %s\n" % (thread_id,)) + + def request_smart_step_into(self, py_db, seq, thread_id, offset, child_offset): + t = pydevd_find_thread_by_id(thread_id) + if t: + py_db.post_method_as_internal_command( + thread_id, internal_smart_step_into, thread_id, offset, child_offset, set_additional_thread_info=set_additional_thread_info) + elif thread_id.startswith('__frame__:'): + sys.stderr.write("Can't set next statement in tasklet: %s\n" % (thread_id,)) + + def request_smart_step_into_by_func_name(self, py_db, seq, thread_id, line, func_name): + # Same thing as set next, just with a different cmd id. 
+ self.request_set_next(py_db, seq, thread_id, CMD_SMART_STEP_INTO, None, line, func_name) + + def request_set_next(self, py_db, seq, thread_id, set_next_cmd_id, original_filename, line, func_name): + ''' + set_next_cmd_id may actually be one of: + + CMD_RUN_TO_LINE + CMD_SET_NEXT_STATEMENT + + CMD_SMART_STEP_INTO -- note: request_smart_step_into is preferred if it's possible + to work with bytecode offset. + + :param Optional[str] original_filename: + If available, the filename may be source translated, otherwise no translation will take + place (the set next just needs the line afterwards as it executes locally, but for + the Jupyter integration, the source mapping may change the actual lines and not only + the filename). + ''' + t = pydevd_find_thread_by_id(thread_id) + if t: + if original_filename is not None: + translated_filename = self.filename_to_server(original_filename) # Apply user path mapping. + pydev_log.debug('Set next (after path translation) in: %s line: %s', translated_filename, line) + func_name = self.to_str(func_name) + + assert translated_filename.__class__ == str # i.e.: bytes on py2 and str on py3 + assert func_name.__class__ == str # i.e.: bytes on py2 and str on py3 + + # Apply source mapping (i.e.: ipython). + _source_mapped_filename, new_line, multi_mapping_applied = py_db.source_mapping.map_to_server( + translated_filename, line) + if multi_mapping_applied: + pydev_log.debug('Set next (after source mapping) in: %s line: %s', translated_filename, line) + line = new_line + + int_cmd = InternalSetNextStatementThread(thread_id, set_next_cmd_id, line, func_name, seq=seq) + py_db.post_internal_command(int_cmd, thread_id) + elif thread_id.startswith('__frame__:'): + sys.stderr.write("Can't set next statement in tasklet: %s\n" % (thread_id,)) + + def request_reload_code(self, py_db, seq, module_name, filename): + ''' + :param seq: if -1 no message will be sent back when the reload is done. + + Note: either module_name or filename may be None (but not both at the same time). + ''' + thread_id = '*' # Any thread + # Note: not going for the main thread because in this case it'd only do the load + # when we stopped on a breakpoint. 
+ py_db.post_method_as_internal_command( + thread_id, internal_reload_code, seq, module_name, filename) + + def request_change_variable(self, py_db, seq, thread_id, frame_id, scope, attr, value): + ''' + :param scope: 'FRAME' or 'GLOBAL' + ''' + py_db.post_method_as_internal_command( + thread_id, internal_change_variable, seq, thread_id, frame_id, scope, attr, value) + + def request_get_variable(self, py_db, seq, thread_id, frame_id, scope, attrs): + ''' + :param scope: 'FRAME' or 'GLOBAL' + ''' + int_cmd = InternalGetVariable(seq, thread_id, frame_id, scope, attrs) + py_db.post_internal_command(int_cmd, thread_id) + + def request_get_array(self, py_db, seq, roffset, coffset, rows, cols, fmt, thread_id, frame_id, scope, attrs): + int_cmd = InternalGetArray(seq, roffset, coffset, rows, cols, fmt, thread_id, frame_id, scope, attrs) + py_db.post_internal_command(int_cmd, thread_id) + + def request_load_full_value(self, py_db, seq, thread_id, frame_id, vars): + int_cmd = InternalLoadFullValue(seq, thread_id, frame_id, vars) + py_db.post_internal_command(int_cmd, thread_id) + + def request_get_description(self, py_db, seq, thread_id, frame_id, expression): + py_db.post_method_as_internal_command( + thread_id, internal_get_description, seq, thread_id, frame_id, expression) + + def request_get_frame(self, py_db, seq, thread_id, frame_id): + py_db.post_method_as_internal_command( + thread_id, internal_get_frame, seq, thread_id, frame_id) + + def to_str(self, s): + ''' + -- in py3 raises an error if it's not str already. + ''' + if s.__class__ != str: + raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (s, s.__class__)) + return s + + def filename_to_str(self, filename): + ''' + -- in py3 raises an error if it's not str already. + ''' + if filename.__class__ != str: + raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (filename, filename.__class__)) + return filename + + def filename_to_server(self, filename): + filename = self.filename_to_str(filename) + filename = pydevd_file_utils.map_file_to_server(filename) + return filename + + class _DummyFrame(object): + ''' + Dummy frame to be used with PyDB.apply_files_filter (as we don't really have the + related frame as breakpoints are added before execution). + ''' + + class _DummyCode(object): + + def __init__(self, filename): + self.co_firstlineno = 1 + self.co_filename = filename + self.co_name = 'invalid func name ' + + def __init__(self, filename): + self.f_code = self._DummyCode(filename) + self.f_globals = {} + + ADD_BREAKPOINT_NO_ERROR = 0 + ADD_BREAKPOINT_FILE_NOT_FOUND = 1 + ADD_BREAKPOINT_FILE_EXCLUDED_BY_FILTERS = 2 + + # This means that the breakpoint couldn't be fully validated (more runtime + # information may be needed). 
+ ADD_BREAKPOINT_LAZY_VALIDATION = 3 + ADD_BREAKPOINT_INVALID_LINE = 4 + + class _AddBreakpointResult(object): + + # :see: ADD_BREAKPOINT_NO_ERROR = 0 + # :see: ADD_BREAKPOINT_FILE_NOT_FOUND = 1 + # :see: ADD_BREAKPOINT_FILE_EXCLUDED_BY_FILTERS = 2 + # :see: ADD_BREAKPOINT_LAZY_VALIDATION = 3 + # :see: ADD_BREAKPOINT_INVALID_LINE = 4 + + __slots__ = ['error_code', 'breakpoint_id', 'translated_filename', 'translated_line', 'original_line'] + + def __init__(self, breakpoint_id, translated_filename, translated_line, original_line): + self.error_code = PyDevdAPI.ADD_BREAKPOINT_NO_ERROR + self.breakpoint_id = breakpoint_id + self.translated_filename = translated_filename + self.translated_line = translated_line + self.original_line = original_line + + def add_breakpoint( + self, py_db, original_filename, breakpoint_type, breakpoint_id, line, condition, func_name, + expression, suspend_policy, hit_condition, is_logpoint, adjust_line=False, on_changed_breakpoint_state=None): + ''' + :param str original_filename: + Note: must be sent as it was received in the protocol. It may be translated in this + function and its final value will be available in the returned _AddBreakpointResult. + + :param str breakpoint_type: + One of: 'python-line', 'django-line', 'jinja2-line'. + + :param int breakpoint_id: + + :param int line: + Note: it's possible that a new line was actually used. If that's the case its + final value will be available in the returned _AddBreakpointResult. + + :param condition: + Either None or the condition to activate the breakpoint. + + :param str func_name: + If "None" (str), may hit in any context. + Empty string will hit only top level. + Any other value must match the scope of the method to be matched. + + :param str expression: + None or the expression to be evaluated. + + :param suspend_policy: + Either "NONE" (to suspend only the current thread when the breakpoint is hit) or + "ALL" (to suspend all threads when a breakpoint is hit). + + :param str hit_condition: + An expression where `@HIT@` will be replaced by the number of hits. + i.e.: `@HIT@ == x` or `@HIT@ >= x` + + :param bool is_logpoint: + If True and an expression is passed, pydevd will create an io message command with the + result of the evaluation. + + :param bool adjust_line: + If True, the breakpoint line should be adjusted if the current line doesn't really + match an executable line (if possible). + + :param callable on_changed_breakpoint_state: + This is called when something changed internally on the breakpoint after it was initially + added (for instance, template file_to_line_to_breakpoints could be signaled as invalid initially and later + when the related template is loaded, if the line is valid it could be marked as valid). + + The signature for the callback should be: + on_changed_breakpoint_state(breakpoint_id: int, add_breakpoint_result: _AddBreakpointResult) + + Note that the add_breakpoint_result should not be modified by the callback (the + implementation may internally reuse the same instance multiple times). + + :return _AddBreakpointResult: + ''' + assert original_filename.__class__ == str, 'Expected str, found: %s' % (original_filename.__class__,) # i.e.: bytes on py2 and str on py3 + + original_filename_normalized = pydevd_file_utils.normcase_from_client(original_filename) + + pydev_log.debug('Request for breakpoint in: %s line: %s', original_filename, line) + original_line = line + # Parameters to reapply breakpoint. 
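The hit_condition parameter documented above is an expression in which '@HIT@' is replaced by the running hit count before evaluation; a simplified sketch of that substitution (the counter and loop here are hypothetical):

    hit_condition = "@HIT@ % 3 == 0"   # e.g. suspend on every third hit

    hit_count = 0
    for _ in range(6):
        hit_count += 1
        if eval(hit_condition.replace("@HIT@", str(hit_count))):
            print("would suspend on hit", hit_count)   # hits 3 and 6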
+ api_add_breakpoint_params = (original_filename, breakpoint_type, breakpoint_id, line, condition, func_name, + expression, suspend_policy, hit_condition, is_logpoint) + + translated_filename = self.filename_to_server(original_filename) # Apply user path mapping. + pydev_log.debug('Breakpoint (after path translation) in: %s line: %s', translated_filename, line) + func_name = self.to_str(func_name) + + assert translated_filename.__class__ == str # i.e.: bytes on py2 and str on py3 + assert func_name.__class__ == str # i.e.: bytes on py2 and str on py3 + + # Apply source mapping (i.e.: ipython). + source_mapped_filename, new_line, multi_mapping_applied = py_db.source_mapping.map_to_server( + translated_filename, line) + + if multi_mapping_applied: + pydev_log.debug('Breakpoint (after source mapping) in: %s line: %s', source_mapped_filename, new_line) + # Note that source mapping is internal and does not change the resulting filename nor line + # (we want the outside world to see the line in the original file and not in the ipython + # cell, otherwise the editor wouldn't be correct as the returned line is the line to + # which the breakpoint will be moved in the editor). + result = self._AddBreakpointResult(breakpoint_id, original_filename, line, original_line) + + # If a multi-mapping was applied, consider it the canonical / source mapped version (translated to ipython cell). + translated_absolute_filename = source_mapped_filename + canonical_normalized_filename = pydevd_file_utils.normcase(source_mapped_filename) + line = new_line + + else: + translated_absolute_filename = pydevd_file_utils.absolute_path(translated_filename) + canonical_normalized_filename = pydevd_file_utils.canonical_normalized_path(translated_filename) + + if adjust_line and not translated_absolute_filename.startswith('<'): + # Validate file_to_line_to_breakpoints and adjust their positions. + try: + lines = sorted(_get_code_lines(translated_absolute_filename)) + except Exception: + pass + else: + if line not in lines: + # Adjust to the first preceding valid line. + idx = bisect.bisect_left(lines, line) + if idx > 0: + line = lines[idx - 1] + + result = self._AddBreakpointResult(breakpoint_id, original_filename, line, original_line) + + py_db.api_received_breakpoints[(original_filename_normalized, breakpoint_id)] = (canonical_normalized_filename, api_add_breakpoint_params) + + if not translated_absolute_filename.startswith('<'): + # Note: if a mapping pointed to a file starting with '<', don't validate. + + if not pydevd_file_utils.exists(translated_absolute_filename): + result.error_code = self.ADD_BREAKPOINT_FILE_NOT_FOUND + return result + + if ( + py_db.is_files_filter_enabled and + not py_db.get_require_module_for_filters() and + py_db.apply_files_filter(self._DummyFrame(translated_absolute_filename), translated_absolute_filename, False) + ): + # Note that if `get_require_module_for_filters()` returns False, we don't do this check. + # This is because we don't have the module name given a file at this point (in + # runtime it's gotten from the frame.f_globals). + # An option could be calculate it based on the filename and current sys.path, + # but on some occasions that may be wrong (for instance with `__main__` or if + # the user dynamically changes the PYTHONPATH). + + # Note: depending on the use-case, filters may be changed, so, keep on going and add the + # breakpoint even with the error code. 
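The adjust_line branch above snaps a requested breakpoint line to the closest preceding executable line via bisect; the same idea in isolation, with made-up line numbers:

    import bisect

    valid_lines = [1, 3, 4, 9]      # e.g. collected from dis.findlinestarts
    requested = 7
    if requested not in valid_lines:
        idx = bisect.bisect_left(valid_lines, requested)
        if idx > 0:
            requested = valid_lines[idx - 1]
    print(requested)                # 4: nearest preceding valid line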
+ result.error_code = self.ADD_BREAKPOINT_FILE_EXCLUDED_BY_FILTERS + + if breakpoint_type == 'python-line': + added_breakpoint = LineBreakpoint( + breakpoint_id, line, condition, func_name, expression, suspend_policy, hit_condition=hit_condition, is_logpoint=is_logpoint) + + file_to_line_to_breakpoints = py_db.breakpoints + file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint + supported_type = True + + else: + add_plugin_breakpoint_result = None + plugin = py_db.get_plugin_lazy_init() + if plugin is not None: + add_plugin_breakpoint_result = plugin.add_breakpoint( + 'add_line_breakpoint', py_db, breakpoint_type, canonical_normalized_filename, + breakpoint_id, line, condition, expression, func_name, hit_condition=hit_condition, is_logpoint=is_logpoint, + add_breakpoint_result=result, on_changed_breakpoint_state=on_changed_breakpoint_state) + + if add_plugin_breakpoint_result is not None: + supported_type = True + added_breakpoint, file_to_line_to_breakpoints = add_plugin_breakpoint_result + file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint + else: + supported_type = False + + if not supported_type: + raise NameError(breakpoint_type) + + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.debug('Added breakpoint:%s - line:%s - func_name:%s\n', canonical_normalized_filename, line, func_name) + + if canonical_normalized_filename in file_to_id_to_breakpoint: + id_to_pybreakpoint = file_to_id_to_breakpoint[canonical_normalized_filename] + else: + id_to_pybreakpoint = file_to_id_to_breakpoint[canonical_normalized_filename] = {} + + id_to_pybreakpoint[breakpoint_id] = added_breakpoint + py_db.consolidate_breakpoints(canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints) + if py_db.plugin is not None: + py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks() + py_db.plugin.after_breakpoints_consolidated(py_db, canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints) + + py_db.on_breakpoints_changed() + return result + + def reapply_breakpoints(self, py_db): + ''' + Reapplies all the received breakpoints as they were received by the API (so, new + translations are applied). + ''' + pydev_log.debug('Reapplying breakpoints.') + values = list(py_db.api_received_breakpoints.values()) # Create a copy with items to reapply. + self.remove_all_breakpoints(py_db, '*') + for val in values: + _new_filename, api_add_breakpoint_params = val + self.add_breakpoint(py_db, *api_add_breakpoint_params) + + def remove_all_breakpoints(self, py_db, received_filename): + ''' + Removes all the breakpoints from a given file or from all files if received_filename == '*'. + + :param str received_filename: + Note: must be sent as it was received in the protocol. It may be translated in this + function. 
+ ''' + assert received_filename.__class__ == str # i.e.: bytes on py2 and str on py3 + changed = False + lst = [ + py_db.file_to_id_to_line_breakpoint, + py_db.file_to_id_to_plugin_breakpoint, + py_db.breakpoints + ] + if hasattr(py_db, 'django_breakpoints'): + lst.append(py_db.django_breakpoints) + + if hasattr(py_db, 'jinja2_breakpoints'): + lst.append(py_db.jinja2_breakpoints) + + if received_filename == '*': + py_db.api_received_breakpoints.clear() + + for file_to_id_to_breakpoint in lst: + if file_to_id_to_breakpoint: + file_to_id_to_breakpoint.clear() + changed = True + + else: + received_filename_normalized = pydevd_file_utils.normcase_from_client(received_filename) + items = list(py_db.api_received_breakpoints.items()) # Create a copy to remove items. + translated_filenames = [] + for key, val in items: + original_filename_normalized, _breakpoint_id = key + if original_filename_normalized == received_filename_normalized: + canonical_normalized_filename, _api_add_breakpoint_params = val + # Note: there can be actually 1:N mappings due to source mapping (i.e.: ipython). + translated_filenames.append(canonical_normalized_filename) + del py_db.api_received_breakpoints[key] + + for canonical_normalized_filename in translated_filenames: + for file_to_id_to_breakpoint in lst: + if canonical_normalized_filename in file_to_id_to_breakpoint: + file_to_id_to_breakpoint.pop(canonical_normalized_filename, None) + changed = True + + if changed: + py_db.on_breakpoints_changed(removed=True) + + def remove_breakpoint(self, py_db, received_filename, breakpoint_type, breakpoint_id): + ''' + :param str received_filename: + Note: must be sent as it was received in the protocol. It may be translated in this + function. + + :param str breakpoint_type: + One of: 'python-line', 'django-line', 'jinja2-line'. + + :param int breakpoint_id: + ''' + received_filename_normalized = pydevd_file_utils.normcase_from_client(received_filename) + for key, val in list(py_db.api_received_breakpoints.items()): + original_filename_normalized, existing_breakpoint_id = key + _new_filename, _api_add_breakpoint_params = val + if received_filename_normalized == original_filename_normalized and existing_breakpoint_id == breakpoint_id: + del py_db.api_received_breakpoints[key] + break + else: + pydev_log.info( + 'Did not find breakpoint to remove: %s (breakpoint id: %s)', received_filename, breakpoint_id) + + file_to_id_to_breakpoint = None + received_filename = self.filename_to_server(received_filename) + canonical_normalized_filename = pydevd_file_utils.canonical_normalized_path(received_filename) + + if breakpoint_type == 'python-line': + file_to_line_to_breakpoints = py_db.breakpoints + file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint + + elif py_db.plugin is not None: + result = py_db.plugin.get_breakpoints(py_db, breakpoint_type) + if result is not None: + file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint + file_to_line_to_breakpoints = result + + if file_to_id_to_breakpoint is None: + pydev_log.critical('Error removing breakpoint. 
Cannot handle breakpoint of type %s', breakpoint_type) + + else: + try: + id_to_pybreakpoint = file_to_id_to_breakpoint.get(canonical_normalized_filename, {}) + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + existing = id_to_pybreakpoint[breakpoint_id] + pydev_log.info('Removed breakpoint:%s - line:%s - func_name:%s (id: %s)\n' % ( + canonical_normalized_filename, existing.line, existing.func_name.encode('utf-8'), breakpoint_id)) + + del id_to_pybreakpoint[breakpoint_id] + py_db.consolidate_breakpoints(canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints) + if py_db.plugin is not None: + py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks() + py_db.plugin.after_breakpoints_consolidated(py_db, canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints) + + except KeyError: + pydev_log.info("Error removing breakpoint: Breakpoint id not found: %s id: %s. Available ids: %s\n", + canonical_normalized_filename, breakpoint_id, list(id_to_pybreakpoint)) + + py_db.on_breakpoints_changed(removed=True) + + def set_function_breakpoints(self, py_db, function_breakpoints): + function_breakpoint_name_to_breakpoint = {} + for function_breakpoint in function_breakpoints: + function_breakpoint_name_to_breakpoint[function_breakpoint.func_name] = function_breakpoint + + py_db.function_breakpoint_name_to_breakpoint = function_breakpoint_name_to_breakpoint + py_db.on_breakpoints_changed() + + def request_exec_or_evaluate( + self, py_db, seq, thread_id, frame_id, expression, is_exec, trim_if_too_big, attr_to_set_result): + py_db.post_method_as_internal_command( + thread_id, internal_evaluate_expression, + seq, thread_id, frame_id, expression, is_exec, trim_if_too_big, attr_to_set_result) + + def request_exec_or_evaluate_json( + self, py_db, request, thread_id): + py_db.post_method_as_internal_command( + thread_id, internal_evaluate_expression_json, request, thread_id) + + def request_set_expression_json(self, py_db, request, thread_id): + py_db.post_method_as_internal_command( + thread_id, internal_set_expression_json, request, thread_id) + + def request_console_exec(self, py_db, seq, thread_id, frame_id, expression): + int_cmd = InternalConsoleExec(seq, thread_id, frame_id, expression) + py_db.post_internal_command(int_cmd, thread_id) + + def request_load_source(self, py_db, seq, filename): + ''' + :param str filename: + Note: must be sent as it was received in the protocol. It may be translated in this + function. + ''' + try: + filename = self.filename_to_server(filename) + assert filename.__class__ == str # i.e.: bytes on py2 and str on py3 + + with tokenize.open(filename) as stream: + source = stream.read() + cmd = py_db.cmd_factory.make_load_source_message(seq, source) + except: + cmd = py_db.cmd_factory.make_error_message(seq, get_exception_traceback_str()) + + py_db.writer.add_command(cmd) + + def get_decompiled_source_from_frame_id(self, py_db, frame_id): + ''' + :param py_db: + :param frame_id: + :throws Exception: + If unable to get the frame in the currently paused frames or if some error happened + when decompiling. + ''' + variable = py_db.suspended_frames_manager.get_variable(int(frame_id)) + frame = variable.value + + # Check if it's in the linecache first. 
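get_decompiled_source_from_frame_id() (continued below) reassembles a file from linecache one line at a time; the same itertools pattern in isolation, pointed at an arbitrary real module on disk:

    import itertools, linecache

    filename = linecache.__file__                                   # any readable .py file
    lines = (linecache.getline(filename, i) for i in itertools.count(1))
    lines = itertools.takewhile(bool, lines)                        # getline returns '' past EOF
    source = "".join(lines)
    print(source.splitlines()[0])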
+ lines = (linecache.getline(frame.f_code.co_filename, i) for i in itertools.count(1)) + lines = itertools.takewhile(bool, lines) # empty lines are '\n', EOF is '' + + source = ''.join(lines) + if not source: + source = code_to_bytecode_representation(frame.f_code) + + return source + + def request_load_source_from_frame_id(self, py_db, seq, frame_id): + try: + source = self.get_decompiled_source_from_frame_id(py_db, frame_id) + cmd = py_db.cmd_factory.make_load_source_from_frame_id_message(seq, source) + except: + cmd = py_db.cmd_factory.make_error_message(seq, get_exception_traceback_str()) + + py_db.writer.add_command(cmd) + + def add_python_exception_breakpoint( + self, + py_db, + exception, + condition, + expression, + notify_on_handled_exceptions, + notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions, + notify_on_first_raise_only, + ignore_libraries, + ): + exception_breakpoint = py_db.add_break_on_exception( + exception, + condition=condition, + expression=expression, + notify_on_handled_exceptions=notify_on_handled_exceptions, + notify_on_unhandled_exceptions=notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions=notify_on_user_unhandled_exceptions, + notify_on_first_raise_only=notify_on_first_raise_only, + ignore_libraries=ignore_libraries, + ) + + if exception_breakpoint is not None: + py_db.on_breakpoints_changed() + + def add_plugins_exception_breakpoint(self, py_db, breakpoint_type, exception): + supported_type = False + plugin = py_db.get_plugin_lazy_init() + if plugin is not None: + supported_type = plugin.add_breakpoint('add_exception_breakpoint', py_db, breakpoint_type, exception) + + if supported_type: + py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks() + py_db.on_breakpoints_changed() + else: + raise NameError(breakpoint_type) + + def remove_python_exception_breakpoint(self, py_db, exception): + try: + cp = py_db.break_on_uncaught_exceptions.copy() + cp.pop(exception, None) + py_db.break_on_uncaught_exceptions = cp + + cp = py_db.break_on_caught_exceptions.copy() + cp.pop(exception, None) + py_db.break_on_caught_exceptions = cp + + cp = py_db.break_on_user_uncaught_exceptions.copy() + cp.pop(exception, None) + py_db.break_on_user_uncaught_exceptions = cp + except: + pydev_log.exception("Error while removing exception %s", sys.exc_info()[0]) + + py_db.on_breakpoints_changed(removed=True) + + def remove_plugins_exception_breakpoint(self, py_db, exception_type, exception): + # I.e.: no need to initialize lazy (if we didn't have it in the first place, we can't remove + # anything from it anyways). 
+ plugin = py_db.plugin + if plugin is None: + return + + supported_type = plugin.remove_exception_breakpoint(py_db, exception_type, exception) + + if supported_type: + py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks() + else: + pydev_log.info('No exception of type: %s was previously registered.', exception_type) + + py_db.on_breakpoints_changed(removed=True) + + def remove_all_exception_breakpoints(self, py_db): + py_db.break_on_uncaught_exceptions = {} + py_db.break_on_caught_exceptions = {} + py_db.break_on_user_uncaught_exceptions = {} + + plugin = py_db.plugin + if plugin is not None: + plugin.remove_all_exception_breakpoints(py_db) + py_db.on_breakpoints_changed(removed=True) + + def set_project_roots(self, py_db, project_roots): + ''' + :param str project_roots: + ''' + py_db.set_project_roots(project_roots) + + def set_stepping_resumes_all_threads(self, py_db, stepping_resumes_all_threads): + py_db.stepping_resumes_all_threads = stepping_resumes_all_threads + + # Add it to the namespace so that it's available as PyDevdAPI.ExcludeFilter + from _pydevd_bundle.pydevd_filtering import ExcludeFilter # noqa + + def set_exclude_filters(self, py_db, exclude_filters): + ''' + :param list(PyDevdAPI.ExcludeFilter) exclude_filters: + ''' + py_db.set_exclude_filters(exclude_filters) + + def set_use_libraries_filter(self, py_db, use_libraries_filter): + py_db.set_use_libraries_filter(use_libraries_filter) + + def request_get_variable_json(self, py_db, request, thread_id): + ''' + :param VariablesRequest request: + ''' + py_db.post_method_as_internal_command( + thread_id, internal_get_variable_json, request) + + def request_change_variable_json(self, py_db, request, thread_id): + ''' + :param SetVariableRequest request: + ''' + py_db.post_method_as_internal_command( + thread_id, internal_change_variable_json, request) + + def set_dont_trace_start_end_patterns(self, py_db, start_patterns, end_patterns): + # Note: start/end patterns normalized internally. + start_patterns = tuple(pydevd_file_utils.normcase(x) for x in start_patterns) + end_patterns = tuple(pydevd_file_utils.normcase(x) for x in end_patterns) + + # After it's set the first time, we can still change it, but we need to reset the + # related caches. + reset_caches = False + dont_trace_start_end_patterns_previously_set = \ + py_db.dont_trace_external_files.__name__ == 'custom_dont_trace_external_files' + + if not dont_trace_start_end_patterns_previously_set and not start_patterns and not end_patterns: + # If it wasn't set previously and start and end patterns are empty we don't need to do anything. + return + + if not py_db.is_cache_file_type_empty(): + # i.e.: custom function set in set_dont_trace_start_end_patterns. 
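set_dont_trace_start_end_patterns builds its predicate (custom_dont_trace_external_files, just below) from prefix/suffix tuples; the underlying idiom is simply that str.startswith and str.endswith accept tuples, shown here with made-up patterns:

    start_patterns = ("/usr/lib/", "/opt/")
    end_patterns = ("_test.py",)
    path = "/usr/lib/python3.8/os.py"
    print(path.startswith(start_patterns) or path.endswith(end_patterns))   # True: would not be traced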
+ if dont_trace_start_end_patterns_previously_set: + reset_caches = py_db.dont_trace_external_files.start_patterns != start_patterns or \ + py_db.dont_trace_external_files.end_patterns != end_patterns + + else: + reset_caches = True + + def custom_dont_trace_external_files(abs_path): + normalized_abs_path = pydevd_file_utils.normcase(abs_path) + return normalized_abs_path.startswith(start_patterns) or normalized_abs_path.endswith(end_patterns) + + custom_dont_trace_external_files.start_patterns = start_patterns + custom_dont_trace_external_files.end_patterns = end_patterns + py_db.dont_trace_external_files = custom_dont_trace_external_files + + if reset_caches: + py_db.clear_dont_trace_start_end_patterns_caches() + + def stop_on_entry(self): + main_thread = pydevd_utils.get_main_thread() + if main_thread is None: + pydev_log.critical('Could not find main thread while setting Stop on Entry.') + else: + info = set_additional_thread_info(main_thread) + info.pydev_original_step_cmd = CMD_STOP_ON_START + info.pydev_step_cmd = CMD_STEP_INTO_MY_CODE + + def set_ignore_system_exit_codes(self, py_db, ignore_system_exit_codes): + py_db.set_ignore_system_exit_codes(ignore_system_exit_codes) + + SourceMappingEntry = pydevd_source_mapping.SourceMappingEntry + + def set_source_mapping(self, py_db, source_filename, mapping): + ''' + :param str source_filename: + The filename for the source mapping (bytes on py2 and str on py3). + This filename will be made absolute in this function. + + :param list(SourceMappingEntry) mapping: + A list with the source mapping entries to be applied to the given filename. + + :return str: + An error message if it was not possible to set the mapping or an empty string if + everything is ok. + ''' + source_filename = self.filename_to_server(source_filename) + absolute_source_filename = pydevd_file_utils.absolute_path(source_filename) + for map_entry in mapping: + map_entry.source_filename = absolute_source_filename + error_msg = py_db.source_mapping.set_source_mapping(absolute_source_filename, mapping) + if error_msg: + return error_msg + + self.reapply_breakpoints(py_db) + return '' + + def set_variable_presentation(self, py_db, variable_presentation): + assert isinstance(variable_presentation, self.VariablePresentation) + py_db.variable_presentation = variable_presentation + + def get_ppid(self): + ''' + Provides the parent pid (even for older versions of Python on Windows). + ''' + ppid = None + + try: + ppid = os.getppid() + except AttributeError: + pass + + if ppid is None and IS_WINDOWS: + ppid = self._get_windows_ppid() + + return ppid + + def _get_windows_ppid(self): + this_pid = os.getpid() + for ppid, pid in _list_ppid_and_pid(): + if pid == this_pid: + return ppid + + return None + + def _terminate_child_processes_windows(self, dont_terminate_child_pids): + this_pid = os.getpid() + for _ in range(50): # Try this at most 50 times before giving up. + + # Note: we can't kill the process itself with taskkill, so, we + # list immediate children, kill that tree and then exit this process. 
+ + children_pids = [] + for ppid, pid in _list_ppid_and_pid(): + if ppid == this_pid: + if pid not in dont_terminate_child_pids: + children_pids.append(pid) + + if not children_pids: + break + else: + for pid in children_pids: + self._call( + ['taskkill', '/F', '/PID', str(pid), '/T'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + del children_pids[:] + + def _terminate_child_processes_linux_and_mac(self, dont_terminate_child_pids): + this_pid = os.getpid() + + def list_children_and_stop_forking(initial_pid, stop=True): + children_pids = [] + if stop: + # Ask to stop forking (shouldn't be called for this process, only subprocesses). + self._call( + ['kill', '-STOP', str(initial_pid)], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + list_popen = self._popen( + ['pgrep', '-P', str(initial_pid)], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + if list_popen is not None: + stdout, _ = list_popen.communicate() + for line in stdout.splitlines(): + line = line.decode('ascii').strip() + if line: + pid = str(line) + if pid in dont_terminate_child_pids: + continue + children_pids.append(pid) + # Recursively get children. + children_pids.extend(list_children_and_stop_forking(pid)) + return children_pids + + previously_found = set() + + for _ in range(50): # Try this at most 50 times before giving up. + + children_pids = list_children_and_stop_forking(this_pid, stop=False) + found_new = False + + for pid in children_pids: + if pid not in previously_found: + found_new = True + previously_found.add(pid) + self._call( + ['kill', '-KILL', str(pid)], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + if not found_new: + break + + def _popen(self, cmdline, **kwargs): + try: + return subprocess.Popen(cmdline, **kwargs) + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + pydev_log.exception('Error running: %s' % (' '.join(cmdline))) + return None + + def _call(self, cmdline, **kwargs): + try: + subprocess.check_call(cmdline, **kwargs) + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + pydev_log.exception('Error running: %s' % (' '.join(cmdline))) + + def set_terminate_child_processes(self, py_db, terminate_child_processes): + py_db.terminate_child_processes = terminate_child_processes + + def terminate_process(self, py_db): + ''' + Terminates the current process (and child processes if the option to also terminate + child processes is enabled). + ''' + try: + if py_db.terminate_child_processes: + pydev_log.debug('Terminating child processes.') + if IS_WINDOWS: + self._terminate_child_processes_windows(py_db.dont_terminate_child_pids) + else: + self._terminate_child_processes_linux_and_mac(py_db.dont_terminate_child_pids) + finally: + pydev_log.debug('Exiting process (os._exit(0)).') + os._exit(0) + + def _terminate_if_commands_processed(self, py_db): + py_db.dispose_and_kill_all_pydevd_threads() + self.terminate_process(py_db) + + def request_terminate_process(self, py_db): + # We mark with a terminate_requested to avoid that paused threads start running + # (we should terminate as is without letting any paused thread run). 
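_terminate_child_processes_linux_and_mac() above shells out to pgrep and kill; a heavily simplified, non-recursive sketch of that approach (POSIX only, assumes pgrep is installed):

    import os, subprocess

    def direct_children(pid):
        proc = subprocess.run(["pgrep", "-P", str(pid)], capture_output=True, text=True)
        return [line.strip() for line in proc.stdout.splitlines() if line.strip()]

    for child in direct_children(os.getpid()):
        subprocess.run(["kill", "-KILL", child])   # the real code also recurses and stops forking first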
+ py_db.terminate_requested = True + run_as_pydevd_daemon_thread(py_db, self._terminate_if_commands_processed, py_db) + + def setup_auto_reload_watcher(self, py_db, enable_auto_reload, watch_dirs, poll_target_time, exclude_patterns, include_patterns): + py_db.setup_auto_reload_watcher(enable_auto_reload, watch_dirs, poll_target_time, exclude_patterns, include_patterns) + + +def _list_ppid_and_pid(): + _TH32CS_SNAPPROCESS = 0x00000002 + + class PROCESSENTRY32(ctypes.Structure): + _fields_ = [("dwSize", ctypes.c_uint32), + ("cntUsage", ctypes.c_uint32), + ("th32ProcessID", ctypes.c_uint32), + ("th32DefaultHeapID", ctypes.c_size_t), + ("th32ModuleID", ctypes.c_uint32), + ("cntThreads", ctypes.c_uint32), + ("th32ParentProcessID", ctypes.c_uint32), + ("pcPriClassBase", ctypes.c_long), + ("dwFlags", ctypes.c_uint32), + ("szExeFile", ctypes.c_char * 260)] + + kernel32 = ctypes.windll.kernel32 + snapshot = kernel32.CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0) + ppid_and_pids = [] + try: + process_entry = PROCESSENTRY32() + process_entry.dwSize = ctypes.sizeof(PROCESSENTRY32) + if not kernel32.Process32First(ctypes.c_void_p(snapshot), ctypes.byref(process_entry)): + pydev_log.critical('Process32First failed (getting process from CreateToolhelp32Snapshot).') + else: + while True: + ppid_and_pids.append((process_entry.th32ParentProcessID, process_entry.th32ProcessID)) + if not kernel32.Process32Next(ctypes.c_void_p(snapshot), ctypes.byref(process_entry)): + break + finally: + kernel32.CloseHandle(snapshot) + + return ppid_and_pids diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_breakpoints.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_breakpoints.py new file mode 100644 index 0000000..d92fccf --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_breakpoints.py @@ -0,0 +1,184 @@ +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_import_class +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame +from _pydev_bundle._pydev_saved_modules import threading + + +class ExceptionBreakpoint(object): + + def __init__( + self, + qname, + condition, + expression, + notify_on_handled_exceptions, + notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions, + notify_on_first_raise_only, + ignore_libraries + ): + exctype = get_exception_class(qname) + self.qname = qname + if exctype is not None: + self.name = exctype.__name__ + else: + self.name = None + + self.condition = condition + self.expression = expression + self.notify_on_unhandled_exceptions = notify_on_unhandled_exceptions + self.notify_on_handled_exceptions = notify_on_handled_exceptions + self.notify_on_first_raise_only = notify_on_first_raise_only + self.notify_on_user_unhandled_exceptions = notify_on_user_unhandled_exceptions + self.ignore_libraries = ignore_libraries + + self.type = exctype + + def __str__(self): + return self.qname + + @property + def has_condition(self): + return self.condition is not None + + def handle_hit_condition(self, frame): + return False + + +class LineBreakpoint(object): + + def __init__(self, breakpoint_id, line, condition, func_name, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False): + self.breakpoint_id = breakpoint_id + self.line = line + self.condition = condition + self.func_name = func_name + self.expression = expression + self.suspend_policy = suspend_policy + self.hit_condition = hit_condition + self._hit_count 
= 0 + self._hit_condition_lock = threading.Lock() + self.is_logpoint = is_logpoint + + @property + def has_condition(self): + return bool(self.condition) or bool(self.hit_condition) + + def handle_hit_condition(self, frame): + if not self.hit_condition: + return False + ret = False + with self._hit_condition_lock: + self._hit_count += 1 + expr = self.hit_condition.replace('@HIT@', str(self._hit_count)) + try: + ret = bool(eval(expr, frame.f_globals, frame.f_locals)) + except Exception: + ret = False + return ret + + +class FunctionBreakpoint(object): + + def __init__(self, func_name, condition, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False): + self.condition = condition + self.func_name = func_name + self.expression = expression + self.suspend_policy = suspend_policy + self.hit_condition = hit_condition + self._hit_count = 0 + self._hit_condition_lock = threading.Lock() + self.is_logpoint = is_logpoint + + @property + def has_condition(self): + return bool(self.condition) or bool(self.hit_condition) + + def handle_hit_condition(self, frame): + if not self.hit_condition: + return False + ret = False + with self._hit_condition_lock: + self._hit_count += 1 + expr = self.hit_condition.replace('@HIT@', str(self._hit_count)) + try: + ret = bool(eval(expr, frame.f_globals, frame.f_locals)) + except Exception: + ret = False + return ret + + +def get_exception_breakpoint(exctype, exceptions): + if not exctype: + exception_full_qname = None + else: + exception_full_qname = str(exctype.__module__) + '.' + exctype.__name__ + + exc = None + if exceptions is not None: + try: + return exceptions[exception_full_qname] + except KeyError: + for exception_breakpoint in exceptions.values(): + if exception_breakpoint.type is not None and issubclass(exctype, exception_breakpoint.type): + if exc is None or issubclass(exception_breakpoint.type, exc.type): + exc = exception_breakpoint + return exc + + +def stop_on_unhandled_exception(py_db, thread, additional_info, arg): + exctype, value, tb = arg + break_on_uncaught_exceptions = py_db.break_on_uncaught_exceptions + if break_on_uncaught_exceptions: + exception_breakpoint = py_db.get_exception_breakpoint(exctype, break_on_uncaught_exceptions) + else: + exception_breakpoint = None + + if not exception_breakpoint: + return + + if tb is None: # sometimes it can be None, e.g. 
with GTK + return + + if exctype is KeyboardInterrupt: + return + + if exctype is SystemExit and py_db.ignore_system_exit_code(value): + return + + frames = [] + user_frame = None + + while tb is not None: + if not py_db.exclude_exception_by_filter(exception_breakpoint, tb): + user_frame = tb.tb_frame + frames.append(tb.tb_frame) + tb = tb.tb_next + + if user_frame is None: + return + + frames_byid = dict([(id(frame), frame) for frame in frames]) + add_exception_to_frame(user_frame, arg) + if exception_breakpoint.condition is not None: + eval_result = py_db.handle_breakpoint_condition(additional_info, exception_breakpoint, user_frame) + if not eval_result: + return + + if exception_breakpoint.expression is not None: + py_db.handle_breakpoint_expression(exception_breakpoint, additional_info, user_frame) + + try: + additional_info.pydev_message = exception_breakpoint.qname + except: + additional_info.pydev_message = exception_breakpoint.qname.encode('utf-8') + + pydev_log.debug('Handling post-mortem stop on exception breakpoint %s' % (exception_breakpoint.qname,)) + + py_db.do_stop_on_unhandled_exception(thread, user_frame, frames_byid, arg) + + +def get_exception_class(kls): + try: + return eval(kls) + except: + return pydevd_import_class.import_name(kls) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_bytecode_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_bytecode_utils.py new file mode 100644 index 0000000..e8c9f54 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_bytecode_utils.py @@ -0,0 +1,843 @@ +""" +Bytecode analysing utils. Originally added for using in smart step into. + +Note: not importable from Python 2. +""" + +from _pydev_bundle import pydev_log +from types import CodeType +from _pydevd_frame_eval.vendored.bytecode.instr import _Variable +from _pydevd_frame_eval.vendored import bytecode +from _pydevd_frame_eval.vendored.bytecode import cfg as bytecode_cfg +import dis +import opcode as _opcode + +from _pydevd_bundle.pydevd_constants import KeyifyList, DebugInfoHolder, IS_PY311_OR_GREATER +from bisect import bisect +from collections import deque + +# When True, throws errors on unknown bytecodes, when False, ignore those as if they didn't change the stack. 
+STRICT_MODE = False + +DEBUG = False + +_BINARY_OPS = set([opname for opname in dis.opname if opname.startswith('BINARY_')]) + +_BINARY_OP_MAP = { + 'BINARY_POWER': '__pow__', + 'BINARY_MULTIPLY': '__mul__', + 'BINARY_MATRIX_MULTIPLY': '__matmul__', + 'BINARY_FLOOR_DIVIDE': '__floordiv__', + 'BINARY_TRUE_DIVIDE': '__div__', + 'BINARY_MODULO': '__mod__', + 'BINARY_ADD': '__add__', + 'BINARY_SUBTRACT': '__sub__', + 'BINARY_LSHIFT': '__lshift__', + 'BINARY_RSHIFT': '__rshift__', + 'BINARY_AND': '__and__', + 'BINARY_OR': '__or__', + 'BINARY_XOR': '__xor__', + 'BINARY_SUBSCR': '__getitem__', + 'BINARY_DIVIDE': '__div__' +} + +_COMP_OP_MAP = { + '<': '__lt__', + '<=': '__le__', + '==': '__eq__', + '!=': '__ne__', + '>': '__gt__', + '>=': '__ge__', + 'in': '__contains__', + 'not in': '__contains__', +} + + +class Target(object): + __slots__ = ['arg', 'lineno', 'offset', 'children_targets'] + + def __init__(self, arg, lineno, offset, children_targets=()): + self.arg = arg + self.lineno = lineno + self.offset = offset + self.children_targets = children_targets + + def __repr__(self): + ret = [] + for s in self.__slots__: + ret.append('%s: %s' % (s, getattr(self, s))) + return 'Target(%s)' % ', '.join(ret) + + __str__ = __repr__ + + +class _TargetIdHashable(object): + + def __init__(self, target): + self.target = target + + def __eq__(self, other): + if not hasattr(other, 'target'): + return + return other.target is self.target + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return id(self.target) + + +class _StackInterpreter(object): + ''' + Good reference: https://github.com/python/cpython/blob/fcb55c0037baab6f98f91ee38ce84b6f874f034a/Python/ceval.c + ''' + + def __init__(self, bytecode): + self.bytecode = bytecode + self._stack = deque() + self.function_calls = [] + self.load_attrs = {} + self.func = set() + self.func_name_id_to_code_object = {} + + def __str__(self): + return 'Stack:\nFunction calls:\n%s\nLoad attrs:\n%s\n' % (self.function_calls, list(self.load_attrs.values())) + + def _getname(self, instr): + if instr.opcode in _opcode.hascompare: + cmp_op = dis.cmp_op[instr.arg] + if cmp_op not in ('exception match', 'BAD'): + return _COMP_OP_MAP.get(cmp_op, cmp_op) + return instr.arg + + def _getcallname(self, instr): + if instr.name == 'BINARY_SUBSCR': + return '__getitem__().__call__' + if instr.name == 'CALL_FUNCTION': + # Note: previously a '__call__().__call__' was returned, but this was a bit weird + # and on Python 3.9 this construct could appear for some internal things where + # it wouldn't be expected. + # Note: it'd be what we had in func()(). + return None + if instr.name == 'MAKE_FUNCTION': + return '__func__().__call__' + if instr.name == 'LOAD_ASSERTION_ERROR': + return 'AssertionError' + name = self._getname(instr) + if isinstance(name, CodeType): + name = name.co_qualname # Note: only available for Python 3.11 + if isinstance(name, _Variable): + name = name.name + + if not isinstance(name, str): + return None + if name.endswith('>'): # xxx., xxx., ... + return name.split('.')[-1] + return name + + def _no_stack_change(self, instr): + pass # Can be aliased when the instruction does nothing. 
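The handlers that follow all use the same naming convention: every bytecode instruction is routed to an on_<OPNAME> method of the stack interpreter, and opcodes without a handler are either skipped or rejected depending on STRICT_MODE (see _get_smart_step_into_targets further down, which does the getattr-based dispatch). A minimal standalone sketch of that dispatch idea, using only the stdlib dis module and a made-up MiniInterpreter/sample function for illustration rather than pydevd's vendored bytecode package:

# Illustrative sketch (not part of pydevd): route dis instructions to
# on_<OPNAME> handlers and silently skip opcodes that have no handler.
import dis

class MiniInterpreter:
    def on_LOAD_CONST(self, instr):
        print('pushed constant', instr.argval)

    def on_CALL_FUNCTION(self, instr):
        # Only emitted on Python <= 3.10; newer interpreters use PRECALL/CALL.
        print('call with', instr.arg, 'positional args')

def sample():
    return len("abc")

interp = MiniInterpreter()
for instr in dis.get_instructions(sample):
    handler = getattr(interp, 'on_%s' % instr.opname, None)
    if handler is not None:
        handler(instr)

Running this prints a line per handled instruction; everything else falls through, which mirrors the non-strict behaviour described above.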
+ + def on_LOAD_GLOBAL(self, instr): + self._stack.append(instr) + + def on_POP_TOP(self, instr): + try: + self._stack.pop() + except IndexError: + pass # Ok (in the end of blocks) + + def on_LOAD_ATTR(self, instr): + self.on_POP_TOP(instr) # replaces the current top + self._stack.append(instr) + self.load_attrs[_TargetIdHashable(instr)] = Target(self._getname(instr), instr.lineno, instr.offset) + + on_LOOKUP_METHOD = on_LOAD_ATTR # Improvement in PyPy + + def on_LOAD_CONST(self, instr): + self._stack.append(instr) + + on_LOAD_DEREF = on_LOAD_CONST + on_LOAD_NAME = on_LOAD_CONST + on_LOAD_CLOSURE = on_LOAD_CONST + on_LOAD_CLASSDEREF = on_LOAD_CONST + + # Although it actually changes the stack, it's inconsequential for us as a function call can't + # really be found there. + on_IMPORT_NAME = _no_stack_change + on_IMPORT_FROM = _no_stack_change + on_IMPORT_STAR = _no_stack_change + on_SETUP_ANNOTATIONS = _no_stack_change + + def on_STORE_FAST(self, instr): + try: + self._stack.pop() + except IndexError: + pass # Ok, we may have a block just with the store + + # Note: it stores in the locals and doesn't put anything in the stack. + + on_STORE_GLOBAL = on_STORE_FAST + on_STORE_DEREF = on_STORE_FAST + on_STORE_ATTR = on_STORE_FAST + on_STORE_NAME = on_STORE_FAST + + on_DELETE_NAME = on_POP_TOP + on_DELETE_ATTR = on_POP_TOP + on_DELETE_GLOBAL = on_POP_TOP + on_DELETE_FAST = on_POP_TOP + on_DELETE_DEREF = on_POP_TOP + + on_DICT_UPDATE = on_POP_TOP + on_SET_UPDATE = on_POP_TOP + + on_GEN_START = on_POP_TOP + + def on_NOP(self, instr): + pass + + def _handle_call_from_instr(self, func_name_instr, func_call_instr): + self.load_attrs.pop(_TargetIdHashable(func_name_instr), None) + call_name = self._getcallname(func_name_instr) + target = None + if not call_name: + pass # Ignore if we can't identify a name + elif call_name in ('', '', '', ''): + code_obj = self.func_name_id_to_code_object[_TargetIdHashable(func_name_instr)] + if code_obj is not None: + children_targets = _get_smart_step_into_targets(code_obj) + if children_targets: + # i.e.: we have targets inside of a or . + # Note that to actually match this in the debugger we need to do matches on 2 frames, + # the one with the and then the actual target inside the . 
+ target = Target(call_name, func_name_instr.lineno, func_call_instr.offset, children_targets) + self.function_calls.append( + target) + + else: + # Ok, regular call + target = Target(call_name, func_name_instr.lineno, func_call_instr.offset) + self.function_calls.append(target) + + if DEBUG and target is not None: + print('Created target', target) + self._stack.append(func_call_instr) # Keep the func call as the result + + def on_COMPARE_OP(self, instr): + try: + _right = self._stack.pop() + except IndexError: + return + try: + _left = self._stack.pop() + except IndexError: + return + + cmp_op = dis.cmp_op[instr.arg] + if cmp_op not in ('exception match', 'BAD'): + self.function_calls.append(Target(self._getname(instr), instr.lineno, instr.offset)) + + self._stack.append(instr) + + def on_IS_OP(self, instr): + try: + self._stack.pop() + except IndexError: + return + try: + self._stack.pop() + except IndexError: + return + + def on_BINARY_SUBSCR(self, instr): + try: + _sub = self._stack.pop() + except IndexError: + return + try: + _container = self._stack.pop() + except IndexError: + return + self.function_calls.append(Target(_BINARY_OP_MAP[instr.name], instr.lineno, instr.offset)) + self._stack.append(instr) + + on_BINARY_MATRIX_MULTIPLY = on_BINARY_SUBSCR + on_BINARY_POWER = on_BINARY_SUBSCR + on_BINARY_MULTIPLY = on_BINARY_SUBSCR + on_BINARY_FLOOR_DIVIDE = on_BINARY_SUBSCR + on_BINARY_TRUE_DIVIDE = on_BINARY_SUBSCR + on_BINARY_MODULO = on_BINARY_SUBSCR + on_BINARY_ADD = on_BINARY_SUBSCR + on_BINARY_SUBTRACT = on_BINARY_SUBSCR + on_BINARY_LSHIFT = on_BINARY_SUBSCR + on_BINARY_RSHIFT = on_BINARY_SUBSCR + on_BINARY_AND = on_BINARY_SUBSCR + on_BINARY_OR = on_BINARY_SUBSCR + on_BINARY_XOR = on_BINARY_SUBSCR + + def on_LOAD_METHOD(self, instr): + self.on_POP_TOP(instr) # Remove the previous as we're loading something from it. + self._stack.append(instr) + + def on_MAKE_FUNCTION(self, instr): + if not IS_PY311_OR_GREATER: + # The qualifier name is no longer put in the stack. + qualname = self._stack.pop() + code_obj_instr = self._stack.pop() + else: + # In 3.11 the code object has a co_qualname which we can use. 
+ qualname = code_obj_instr = self._stack.pop() + + arg = instr.arg + if arg & 0x08: + _func_closure = self._stack.pop() + if arg & 0x04: + _func_annotations = self._stack.pop() + if arg & 0x02: + _func_kwdefaults = self._stack.pop() + if arg & 0x01: + _func_defaults = self._stack.pop() + + call_name = self._getcallname(qualname) + if call_name in ('', '', '', ''): + if isinstance(code_obj_instr.arg, CodeType): + self.func_name_id_to_code_object[_TargetIdHashable(qualname)] = code_obj_instr.arg + self._stack.append(qualname) + + def on_LOAD_FAST(self, instr): + self._stack.append(instr) + + def on_LOAD_ASSERTION_ERROR(self, instr): + self._stack.append(instr) + + on_LOAD_BUILD_CLASS = on_LOAD_FAST + + def on_CALL_METHOD(self, instr): + # pop the actual args + for _ in range(instr.arg): + self._stack.pop() + + func_name_instr = self._stack.pop() + self._handle_call_from_instr(func_name_instr, instr) + + def on_PUSH_NULL(self, instr): + self._stack.append(instr) + + def on_CALL_FUNCTION(self, instr): + arg = instr.arg + + argc = arg & 0xff # positional args + argc += ((arg >> 8) * 2) # keyword args + + # pop the actual args + for _ in range(argc): + try: + self._stack.pop() + except IndexError: + return + + try: + func_name_instr = self._stack.pop() + except IndexError: + return + self._handle_call_from_instr(func_name_instr, instr) + + def on_CALL_FUNCTION_KW(self, instr): + # names of kw args + _names_of_kw_args = self._stack.pop() + + # pop the actual args + arg = instr.arg + + argc = arg & 0xff # positional args + argc += ((arg >> 8) * 2) # keyword args + + for _ in range(argc): + self._stack.pop() + + func_name_instr = self._stack.pop() + self._handle_call_from_instr(func_name_instr, instr) + + def on_CALL_FUNCTION_VAR(self, instr): + # var name + _var_arg = self._stack.pop() + + # pop the actual args + arg = instr.arg + + argc = arg & 0xff # positional args + argc += ((arg >> 8) * 2) # keyword args + + for _ in range(argc): + self._stack.pop() + + func_name_instr = self._stack.pop() + self._handle_call_from_instr(func_name_instr, instr) + + def on_CALL_FUNCTION_VAR_KW(self, instr): + # names of kw args + _names_of_kw_args = self._stack.pop() + + arg = instr.arg + + argc = arg & 0xff # positional args + argc += ((arg >> 8) * 2) # keyword args + + # also pop **kwargs + self._stack.pop() + + # pop the actual args + for _ in range(argc): + self._stack.pop() + + func_name_instr = self._stack.pop() + self._handle_call_from_instr(func_name_instr, instr) + + def on_CALL_FUNCTION_EX(self, instr): + if instr.arg & 0x01: + _kwargs = self._stack.pop() + _callargs = self._stack.pop() + func_name_instr = self._stack.pop() + self._handle_call_from_instr(func_name_instr, instr) + + on_YIELD_VALUE = _no_stack_change + on_GET_AITER = _no_stack_change + on_GET_ANEXT = _no_stack_change + on_END_ASYNC_FOR = _no_stack_change + on_BEFORE_ASYNC_WITH = _no_stack_change + on_SETUP_ASYNC_WITH = _no_stack_change + on_YIELD_FROM = _no_stack_change + on_SETUP_LOOP = _no_stack_change + on_FOR_ITER = _no_stack_change + on_BREAK_LOOP = _no_stack_change + on_JUMP_ABSOLUTE = _no_stack_change + on_RERAISE = _no_stack_change + on_LIST_TO_TUPLE = _no_stack_change + on_CALL_FINALLY = _no_stack_change + on_POP_FINALLY = _no_stack_change + + def on_JUMP_IF_FALSE_OR_POP(self, instr): + try: + self._stack.pop() + except IndexError: + return + + on_JUMP_IF_TRUE_OR_POP = on_JUMP_IF_FALSE_OR_POP + + def on_JUMP_IF_NOT_EXC_MATCH(self, instr): + try: + self._stack.pop() + except IndexError: + return + try: + self._stack.pop() + 
except IndexError: + return + + def on_ROT_TWO(self, instr): + try: + p0 = self._stack.pop() + except IndexError: + return + + try: + p1 = self._stack.pop() + except: + self._stack.append(p0) + return + + self._stack.append(p0) + self._stack.append(p1) + + def on_ROT_THREE(self, instr): + try: + p0 = self._stack.pop() + except IndexError: + return + + try: + p1 = self._stack.pop() + except: + self._stack.append(p0) + return + + try: + p2 = self._stack.pop() + except: + self._stack.append(p0) + self._stack.append(p1) + return + + self._stack.append(p0) + self._stack.append(p1) + self._stack.append(p2) + + def on_ROT_FOUR(self, instr): + try: + p0 = self._stack.pop() + except IndexError: + return + + try: + p1 = self._stack.pop() + except: + self._stack.append(p0) + return + + try: + p2 = self._stack.pop() + except: + self._stack.append(p0) + self._stack.append(p1) + return + + try: + p3 = self._stack.pop() + except: + self._stack.append(p0) + self._stack.append(p1) + self._stack.append(p2) + return + + self._stack.append(p0) + self._stack.append(p1) + self._stack.append(p2) + self._stack.append(p3) + + def on_BUILD_LIST_FROM_ARG(self, instr): + self._stack.append(instr) + + def on_BUILD_MAP(self, instr): + for _i in range(instr.arg): + self._stack.pop() + self._stack.pop() + self._stack.append(instr) + + def on_BUILD_CONST_KEY_MAP(self, instr): + self.on_POP_TOP(instr) # keys + for _i in range(instr.arg): + self.on_POP_TOP(instr) # value + self._stack.append(instr) + + on_RETURN_VALUE = on_POP_TOP + on_POP_JUMP_IF_FALSE = on_POP_TOP + on_POP_JUMP_IF_TRUE = on_POP_TOP + on_DICT_MERGE = on_POP_TOP + on_LIST_APPEND = on_POP_TOP + on_SET_ADD = on_POP_TOP + on_LIST_EXTEND = on_POP_TOP + on_UNPACK_EX = on_POP_TOP + + # ok: doesn't change the stack (converts top to getiter(top)) + on_GET_ITER = _no_stack_change + on_GET_AWAITABLE = _no_stack_change + on_GET_YIELD_FROM_ITER = _no_stack_change + + def on_RETURN_GENERATOR(self, instr): + self._stack.append(instr) + + on_RETURN_GENERATOR = _no_stack_change + on_RESUME = _no_stack_change + + def on_MAP_ADD(self, instr): + self.on_POP_TOP(instr) + self.on_POP_TOP(instr) + + def on_UNPACK_SEQUENCE(self, instr): + self._stack.pop() + for _i in range(instr.arg): + self._stack.append(instr) + + def on_BUILD_LIST(self, instr): + for _i in range(instr.arg): + self.on_POP_TOP(instr) + self._stack.append(instr) + + on_BUILD_TUPLE = on_BUILD_LIST + on_BUILD_STRING = on_BUILD_LIST + on_BUILD_TUPLE_UNPACK_WITH_CALL = on_BUILD_LIST + on_BUILD_TUPLE_UNPACK = on_BUILD_LIST + on_BUILD_LIST_UNPACK = on_BUILD_LIST + on_BUILD_MAP_UNPACK_WITH_CALL = on_BUILD_LIST + on_BUILD_MAP_UNPACK = on_BUILD_LIST + on_BUILD_SET = on_BUILD_LIST + on_BUILD_SET_UNPACK = on_BUILD_LIST + + on_SETUP_FINALLY = _no_stack_change + on_POP_FINALLY = _no_stack_change + on_BEGIN_FINALLY = _no_stack_change + on_END_FINALLY = _no_stack_change + + def on_RAISE_VARARGS(self, instr): + for _i in range(instr.arg): + self.on_POP_TOP(instr) + + on_POP_BLOCK = _no_stack_change + on_JUMP_FORWARD = _no_stack_change + on_POP_EXCEPT = _no_stack_change + on_SETUP_EXCEPT = _no_stack_change + on_WITH_EXCEPT_START = _no_stack_change + + on_END_FINALLY = _no_stack_change + on_BEGIN_FINALLY = _no_stack_change + on_SETUP_WITH = _no_stack_change + on_WITH_CLEANUP_START = _no_stack_change + on_WITH_CLEANUP_FINISH = _no_stack_change + on_FORMAT_VALUE = _no_stack_change + on_EXTENDED_ARG = _no_stack_change + + def on_INPLACE_ADD(self, instr): + # This would actually pop 2 and leave the value in the stack. 
+ # In a += 1 it pop `a` and `1` and leave the resulting value + # for a load. In our case, let's just pop the `1` and leave the `a` + # instead of leaving the INPLACE_ADD bytecode. + try: + self._stack.pop() + except IndexError: + pass + + on_INPLACE_POWER = on_INPLACE_ADD + on_INPLACE_MULTIPLY = on_INPLACE_ADD + on_INPLACE_MATRIX_MULTIPLY = on_INPLACE_ADD + on_INPLACE_TRUE_DIVIDE = on_INPLACE_ADD + on_INPLACE_FLOOR_DIVIDE = on_INPLACE_ADD + on_INPLACE_MODULO = on_INPLACE_ADD + on_INPLACE_SUBTRACT = on_INPLACE_ADD + on_INPLACE_RSHIFT = on_INPLACE_ADD + on_INPLACE_LSHIFT = on_INPLACE_ADD + on_INPLACE_AND = on_INPLACE_ADD + on_INPLACE_OR = on_INPLACE_ADD + on_INPLACE_XOR = on_INPLACE_ADD + + def on_DUP_TOP(self, instr): + try: + i = self._stack[-1] + except IndexError: + # ok (in the start of block) + self._stack.append(instr) + else: + self._stack.append(i) + + def on_DUP_TOP_TWO(self, instr): + if len(self._stack) == 0: + self._stack.append(instr) + return + + if len(self._stack) == 1: + i = self._stack[-1] + self._stack.append(i) + self._stack.append(instr) + return + + i = self._stack[-1] + j = self._stack[-2] + self._stack.append(j) + self._stack.append(i) + + def on_BUILD_SLICE(self, instr): + for _ in range(instr.arg): + try: + self._stack.pop() + except IndexError: + pass + self._stack.append(instr) + + def on_STORE_SUBSCR(self, instr): + try: + self._stack.pop() + self._stack.pop() + self._stack.pop() + except IndexError: + pass + + def on_DELETE_SUBSCR(self, instr): + try: + self._stack.pop() + self._stack.pop() + except IndexError: + pass + + # Note: on Python 3 this is only found on interactive mode to print the results of + # some evaluation. + on_PRINT_EXPR = on_POP_TOP + + on_UNARY_POSITIVE = _no_stack_change + on_UNARY_NEGATIVE = _no_stack_change + on_UNARY_NOT = _no_stack_change + on_UNARY_INVERT = _no_stack_change + + on_CACHE = _no_stack_change + on_PRECALL = _no_stack_change + + +def _get_smart_step_into_targets(code): + ''' + :return list(Target) + ''' + b = bytecode.Bytecode.from_code(code) + cfg = bytecode_cfg.ControlFlowGraph.from_bytecode(b) + + ret = [] + + for block in cfg: + if DEBUG: + print('\nStart block----') + stack = _StackInterpreter(block) + for instr in block: + try: + func_name = 'on_%s' % (instr.name,) + func = getattr(stack, func_name, None) + + if DEBUG: + if instr.name != 'CACHE': # Filter the ones we don't want to see. + print('\nWill handle: ', instr, '>>', stack._getname(instr), '<<') + print('Current stack:') + for entry in stack._stack: + print(' arg:', stack._getname(entry), '(', entry, ')') + + if func is None: + if STRICT_MODE: + raise AssertionError('%s not found.' % (func_name,)) + else: + continue + func(instr) + except: + if STRICT_MODE: + raise # Error in strict mode. + else: + # In non-strict mode, log it (if in verbose mode) and keep on going. + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2: + pydev_log.exception('Exception computing step into targets (handled).') + + ret.extend(stack.function_calls) + # No longer considering attr loads as calls (while in theory sometimes it's possible + # that something as `some.attr` can turn out to be a property which could be stepped + # in, it's not that common in practice and can be surprising for users, so, disabling + # step into from stepping into properties). + # ret.extend(stack.load_attrs.values()) + + return ret + + +# Note that the offset is unique within the frame (so, we can use it as the target id). 
+# Also, as the offset is the instruction offset within the frame, it's possible to +# to inspect the parent frame for frame.f_lasti to know where we actually are (as the +# caller name may not always match the new frame name). +class Variant(object): + __slots__ = ['name', 'is_visited', 'line', 'offset', 'call_order', 'children_variants', 'parent'] + + def __init__(self, name, is_visited, line, offset, call_order, children_variants=None): + self.name = name + self.is_visited = is_visited + self.line = line + self.offset = offset + self.call_order = call_order + self.children_variants = children_variants + self.parent = None + if children_variants: + for variant in children_variants: + variant.parent = self + + def __repr__(self): + ret = [] + for s in self.__slots__: + if s == 'parent': + try: + parent = self.parent + except AttributeError: + ret.append('%s: ' % (s,)) + else: + if parent is None: + ret.append('parent: None') + else: + ret.append('parent: %s (%s)' % (parent.name, parent.offset)) + continue + + if s == 'children_variants': + ret.append('children_variants: %s' % (len(self.children_variants) if self.children_variants else 0)) + continue + + try: + ret.append('%s: %s' % (s, getattr(self, s))) + except AttributeError: + ret.append('%s: ' % (s,)) + return 'Variant(%s)' % ', '.join(ret) + + __str__ = __repr__ + + +def _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base): + name = target.arg + if not isinstance(name, str): + return + if target.lineno > end_line: + return + if target.lineno < start_line: + return + + call_order = call_order_cache.get(name, 0) + 1 + call_order_cache[name] = call_order + is_visited = target.offset <= lasti + + children_targets = target.children_targets + children_variants = None + if children_targets: + children_variants = [ + _convert_target_to_variant(child, start_line, end_line, call_order_cache, lasti, base) + for child in target.children_targets] + + return Variant(name, is_visited, target.lineno - base, target.offset, call_order, children_variants) + + +def calculate_smart_step_into_variants(frame, start_line, end_line, base=0): + """ + Calculate smart step into variants for the given line range. + :param frame: + :type frame: :py:class:`types.FrameType` + :param start_line: + :param end_line: + :return: A list of call names from the first to the last. + :note: it's guaranteed that the offsets appear in order. + :raise: :py:class:`RuntimeError` if failed to parse the bytecode or if dis cannot be used. + """ + variants = [] + code = frame.f_code + lasti = frame.f_lasti + + call_order_cache = {} + if DEBUG: + print('dis.dis:') + if IS_PY311_OR_GREATER: + dis.dis(code, show_caches=False) + else: + dis.dis(code) + + for target in _get_smart_step_into_targets(code): + variant = _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base) + if variant is None: + continue + variants.append(variant) + + return variants + + +def get_smart_step_into_variant_from_frame_offset(frame_f_lasti, variants): + """ + Given the frame.f_lasti, return the related `Variant`. + + :note: if the offset is found before any variant available or no variants are + available, None is returned. 
+ + :rtype: Variant|NoneType + """ + if not variants: + return None + + i = bisect(KeyifyList(variants, lambda entry:entry.offset), frame_f_lasti) + + if i == 0: + return None + + else: + return variants[i - 1] diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_code_to_source.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_code_to_source.py new file mode 100644 index 0000000..40feb76 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_code_to_source.py @@ -0,0 +1,611 @@ +""" +Decompiler that can be used with the debugger (where statements correctly represent the +line numbers). + +Note: this is a work in progress / proof of concept / not ready to be used. +""" + +import dis + +from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions +from _pydev_bundle import pydev_log +import sys +import inspect +from io import StringIO + + +class _Stack(object): + + def __init__(self): + self._contents = [] + + def push(self, obj): + # print('push', obj) + self._contents.append(obj) + + def pop(self): + return self._contents.pop(-1) + + +INDENT_MARKER = object() +DEDENT_MARKER = object() +_SENTINEL = object() + +DEBUG = False + + +class _Token(object): + + def __init__(self, i_line, instruction=None, tok=_SENTINEL, priority=0, after=None, end_of_line=False): + ''' + :param i_line: + :param instruction: + :param tok: + :param priority: + :param after: + :param end_of_line: + Marker to signal only after all the other tokens have been written. + ''' + self.i_line = i_line + if tok is not _SENTINEL: + self.tok = tok + else: + if instruction is not None: + if inspect.iscode(instruction.argval): + self.tok = '' + else: + self.tok = str(instruction.argval) + else: + raise AssertionError('Either the tok or the instruction is needed.') + self.instruction = instruction + self.priority = priority + self.end_of_line = end_of_line + self._after_tokens = set() + self._after_handler_tokens = set() + if after: + self.mark_after(after) + + def mark_after(self, v): + if isinstance(v, _Token): + self._after_tokens.add(v) + elif isinstance(v, _BaseHandler): + self._after_handler_tokens.add(v) + + else: + raise AssertionError('Unhandled: %s' % (v,)) + + def get_after_tokens(self): + ret = self._after_tokens.copy() + for handler in self._after_handler_tokens: + ret.update(handler.tokens) + return ret + + def __repr__(self): + return 'Token(%s, after: %s)' % (self.tok, self.get_after_tokens()) + + __str__ = __repr__ + + +class _Writer(object): + + def __init__(self): + self.line_to_contents = {} + self.all_tokens = set() + + def get_line(self, line): + lst = self.line_to_contents.get(line) + if lst is None: + lst = self.line_to_contents[line] = [] + return lst + + def indent(self, line): + self.get_line(line).append(INDENT_MARKER) + + def dedent(self, line): + self.get_line(line).append(DEDENT_MARKER) + + def write(self, line, token): + if token in self.all_tokens: + return + self.all_tokens.add(token) + assert isinstance(token, _Token) + lst = self.get_line(line) + lst.append(token) + + +class _BaseHandler(object): + + def __init__(self, i_line, instruction, stack, writer, disassembler): + self.i_line = i_line + self.instruction = instruction + self.stack = stack + self.writer = writer + self.disassembler = disassembler + self.tokens = [] + self._handle() + + def _write_tokens(self): + for token in self.tokens: + self.writer.write(token.i_line, token) + + def _handle(self): + raise 
NotImplementedError(self) + + def __repr__(self, *args, **kwargs): + try: + return "%s line:%s" % (self.instruction, self.i_line) + except: + return object.__repr__(self) + + __str__ = __repr__ + + +_op_name_to_handler = {} + + +def _register(cls): + _op_name_to_handler[cls.opname] = cls + return cls + + +class _BasePushHandler(_BaseHandler): + + def _handle(self): + self.stack.push(self) + + +class _BaseLoadHandler(_BasePushHandler): + + def _handle(self): + _BasePushHandler._handle(self) + self.tokens = [_Token(self.i_line, self.instruction)] + + +@_register +class _LoadBuildClass(_BasePushHandler): + opname = "LOAD_BUILD_CLASS" + + +@_register +class _LoadConst(_BaseLoadHandler): + opname = "LOAD_CONST" + + +@_register +class _LoadName(_BaseLoadHandler): + opname = "LOAD_NAME" + + +@_register +class _LoadGlobal(_BaseLoadHandler): + opname = "LOAD_GLOBAL" + + +@_register +class _LoadFast(_BaseLoadHandler): + opname = "LOAD_FAST" + + +@_register +class _GetIter(_BaseHandler): + ''' + Implements TOS = iter(TOS). + ''' + opname = "GET_ITER" + iter_target = None + + def _handle(self): + self.iter_target = self.stack.pop() + self.tokens.extend(self.iter_target.tokens) + self.stack.push(self) + + +@_register +class _ForIter(_BaseHandler): + ''' + TOS is an iterator. Call its __next__() method. If this yields a new value, push it on the stack + (leaving the iterator below it). If the iterator indicates it is exhausted TOS is popped, and + the byte code counter is incremented by delta. + ''' + opname = "FOR_ITER" + + iter_in = None + + def _handle(self): + self.iter_in = self.stack.pop() + self.stack.push(self) + + def store_in_name(self, store_name): + for_token = _Token(self.i_line, None, 'for ') + self.tokens.append(for_token) + prev = for_token + + t_name = _Token(store_name.i_line, store_name.instruction, after=prev) + self.tokens.append(t_name) + prev = t_name + + in_token = _Token(store_name.i_line, None, ' in ', after=prev) + self.tokens.append(in_token) + prev = in_token + + max_line = store_name.i_line + if self.iter_in: + for t in self.iter_in.tokens: + t.mark_after(prev) + max_line = max(max_line, t.i_line) + prev = t + self.tokens.extend(self.iter_in.tokens) + + colon_token = _Token(self.i_line, None, ':', after=prev) + self.tokens.append(colon_token) + prev = for_token + + self._write_tokens() + + +@_register +class _StoreName(_BaseHandler): + ''' + Implements name = TOS. namei is the index of name in the attribute co_names of the code object. + The compiler tries to use STORE_FAST or STORE_GLOBAL if possible. + ''' + + opname = "STORE_NAME" + + def _handle(self): + v = self.stack.pop() + + if isinstance(v, _ForIter): + v.store_in_name(self) + else: + if not isinstance(v, _MakeFunction) or v.is_lambda: + line = self.i_line + for t in v.tokens: + line = min(line, t.i_line) + + t_name = _Token(line, self.instruction) + t_equal = _Token(line, None, '=', after=t_name) + + self.tokens.append(t_name) + self.tokens.append(t_equal) + + for t in v.tokens: + t.mark_after(t_equal) + self.tokens.extend(v.tokens) + + self._write_tokens() + + +@_register +class _ReturnValue(_BaseHandler): + """ + Returns with TOS to the caller of the function. 
+ """ + + opname = "RETURN_VALUE" + + def _handle(self): + v = self.stack.pop() + return_token = _Token(self.i_line, None, 'return ', end_of_line=True) + self.tokens.append(return_token) + for token in v.tokens: + token.mark_after(return_token) + self.tokens.extend(v.tokens) + + self._write_tokens() + + +@_register +class _CallFunction(_BaseHandler): + """ + + CALL_FUNCTION(argc) + + Calls a callable object with positional arguments. argc indicates the number of positional + arguments. The top of the stack contains positional arguments, with the right-most argument + on top. Below the arguments is a callable object to call. CALL_FUNCTION pops all arguments + and the callable object off the stack, calls the callable object with those arguments, and + pushes the return value returned by the callable object. + + Changed in version 3.6: This opcode is used only for calls with positional arguments. + + """ + + opname = "CALL_FUNCTION" + + def _handle(self): + args = [] + for _i in range(self.instruction.argval + 1): + arg = self.stack.pop() + args.append(arg) + it = reversed(args) + name = next(it) + max_line = name.i_line + for t in name.tokens: + self.tokens.append(t) + + tok_open_parens = _Token(name.i_line, None, '(', after=name) + self.tokens.append(tok_open_parens) + + prev = tok_open_parens + for i, arg in enumerate(it): + for t in arg.tokens: + t.mark_after(name) + t.mark_after(prev) + max_line = max(max_line, t.i_line) + self.tokens.append(t) + prev = arg + + if i > 0: + comma_token = _Token(prev.i_line, None, ',', after=prev) + self.tokens.append(comma_token) + prev = comma_token + + tok_close_parens = _Token(max_line, None, ')', after=prev) + self.tokens.append(tok_close_parens) + + self._write_tokens() + + self.stack.push(self) + + +@_register +class _MakeFunctionPy3(_BaseHandler): + """ + Pushes a new function object on the stack. 
From bottom to top, the consumed stack must consist + of values if the argument carries a specified flag value + + 0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order + + 0x02 a dictionary of keyword-only parameters' default values + + 0x04 an annotation dictionary + + 0x08 a tuple containing cells for free variables, making a closure + + the code associated with the function (at TOS1) + + the qualified name of the function (at TOS) + """ + + opname = "MAKE_FUNCTION" + is_lambda = False + + def _handle(self): + stack = self.stack + self.qualified_name = stack.pop() + self.code = stack.pop() + + default_node = None + if self.instruction.argval & 0x01: + default_node = stack.pop() + + is_lambda = self.is_lambda = '' in [x.tok for x in self.qualified_name.tokens] + + if not is_lambda: + def_token = _Token(self.i_line, None, 'def ') + self.tokens.append(def_token) + + for token in self.qualified_name.tokens: + self.tokens.append(token) + if not is_lambda: + token.mark_after(def_token) + prev = token + + open_parens_token = _Token(self.i_line, None, '(', after=prev) + self.tokens.append(open_parens_token) + prev = open_parens_token + + code = self.code.instruction.argval + + if default_node: + defaults = ([_SENTINEL] * (len(code.co_varnames) - len(default_node.instruction.argval))) + list(default_node.instruction.argval) + else: + defaults = [_SENTINEL] * len(code.co_varnames) + + for i, arg in enumerate(code.co_varnames): + if i > 0: + comma_token = _Token(prev.i_line, None, ', ', after=prev) + self.tokens.append(comma_token) + prev = comma_token + + arg_token = _Token(self.i_line, None, arg, after=prev) + self.tokens.append(arg_token) + + default = defaults[i] + if default is not _SENTINEL: + eq_token = _Token(default_node.i_line, None, '=', after=prev) + self.tokens.append(eq_token) + prev = eq_token + + default_token = _Token(default_node.i_line, None, str(default), after=prev) + self.tokens.append(default_token) + prev = default_token + + tok_close_parens = _Token(prev.i_line, None, '):', after=prev) + self.tokens.append(tok_close_parens) + + self._write_tokens() + + stack.push(self) + self.writer.indent(prev.i_line + 1) + self.writer.dedent(max(self.disassembler.merge_code(code))) + + +_MakeFunction = _MakeFunctionPy3 + + +def _print_after_info(line_contents, stream=None): + if stream is None: + stream = sys.stdout + for token in line_contents: + after_tokens = token.get_after_tokens() + if after_tokens: + s = '%s after: %s\n' % ( + repr(token.tok), + ('"' + '", "'.join(t.tok for t in token.get_after_tokens()) + '"')) + stream.write(s) + else: + stream.write('%s (NO REQUISITES)' % repr(token.tok)) + + +def _compose_line_contents(line_contents, previous_line_tokens): + lst = [] + handled = set() + + add_to_end_of_line = [] + delete_indexes = [] + for i, token in enumerate(line_contents): + if token.end_of_line: + add_to_end_of_line.append(token) + delete_indexes.append(i) + for i in reversed(delete_indexes): + del line_contents[i] + del delete_indexes + + while line_contents: + added = False + delete_indexes = [] + + for i, token in enumerate(line_contents): + after_tokens = token.get_after_tokens() + for after in after_tokens: + if after not in handled and after not in previous_line_tokens: + break + else: + added = True + previous_line_tokens.add(token) + handled.add(token) + lst.append(token.tok) + delete_indexes.append(i) + + for i in reversed(delete_indexes): + del line_contents[i] + + if not added: + if add_to_end_of_line: + 
line_contents.extend(add_to_end_of_line) + del add_to_end_of_line[:] + continue + + # Something is off, let's just add as is. + for token in line_contents: + if token not in handled: + lst.append(token.tok) + + stream = StringIO() + _print_after_info(line_contents, stream) + pydev_log.critical('Error. After markers are not correct:\n%s', stream.getvalue()) + break + return ''.join(lst) + + +class _PyCodeToSource(object): + + def __init__(self, co, memo=None): + if memo is None: + memo = {} + self.memo = memo + self.co = co + self.instructions = list(iter_instructions(co)) + self.stack = _Stack() + self.writer = _Writer() + + def _process_next(self, i_line): + instruction = self.instructions.pop(0) + handler_class = _op_name_to_handler.get(instruction.opname) + if handler_class is not None: + s = handler_class(i_line, instruction, self.stack, self.writer, self) + if DEBUG: + print(s) + + else: + if DEBUG: + print("UNHANDLED", instruction) + + def build_line_to_contents(self): + co = self.co + + op_offset_to_line = dict(dis.findlinestarts(co)) + curr_line_index = 0 + + instructions = self.instructions + while instructions: + instruction = instructions[0] + new_line_index = op_offset_to_line.get(instruction.offset) + if new_line_index is not None: + if new_line_index is not None: + curr_line_index = new_line_index + + self._process_next(curr_line_index) + return self.writer.line_to_contents + + def merge_code(self, code): + if DEBUG: + print('merge code ----') + # for d in dir(code): + # if not d.startswith('_'): + # print(d, getattr(code, d)) + line_to_contents = _PyCodeToSource(code, self.memo).build_line_to_contents() + lines = [] + for line, contents in sorted(line_to_contents.items()): + lines.append(line) + self.writer.get_line(line).extend(contents) + if DEBUG: + print('end merge code ----') + return lines + + def disassemble(self): + show_lines = False + line_to_contents = self.build_line_to_contents() + stream = StringIO() + last_line = 0 + indent = '' + previous_line_tokens = set() + for i_line, contents in sorted(line_to_contents.items()): + while last_line < i_line - 1: + if show_lines: + stream.write(u"%s.\n" % (last_line + 1,)) + else: + stream.write(u"\n") + last_line += 1 + + line_contents = [] + dedents_found = 0 + for part in contents: + if part is INDENT_MARKER: + if DEBUG: + print('found indent', i_line) + indent += ' ' + continue + if part is DEDENT_MARKER: + if DEBUG: + print('found dedent', i_line) + dedents_found += 1 + continue + line_contents.append(part) + + s = indent + _compose_line_contents(line_contents, previous_line_tokens) + if show_lines: + stream.write(u"%s. %s\n" % (i_line, s)) + else: + stream.write(u"%s\n" % s) + + if dedents_found: + indent = indent[:-(4 * dedents_found)] + last_line = i_line + + return stream.getvalue() + + +def code_obj_to_source(co): + """ + Converts a code object to source code to provide a suitable representation for the compiler when + the actual source code is not found. + + This is a work in progress / proof of concept / not ready to be used. 
+ """ + ret = _PyCodeToSource(co).disassemble() + if DEBUG: + print(ret) + return ret diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_collect_bytecode_info.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_collect_bytecode_info.py new file mode 100644 index 0000000..711f7dd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_collect_bytecode_info.py @@ -0,0 +1,925 @@ +import dis +import inspect +import sys +from collections import namedtuple + +from _pydev_bundle import pydev_log +from opcode import (EXTENDED_ARG, HAVE_ARGUMENT, cmp_op, hascompare, hasconst, + hasfree, hasjrel, haslocal, hasname, opname) + +from io import StringIO + + +class TryExceptInfo(object): + + def __init__(self, try_line, ignore=False): + ''' + :param try_line: + :param ignore: + Usually we should ignore any block that's not a try..except + (this can happen for finally blocks, with statements, etc, for + which we create temporary entries). + ''' + self.try_line = try_line + self.ignore = ignore + self.except_line = -1 + self.except_end_line = -1 + self.raise_lines_in_except = [] + + # Note: these may not be available if generated from source instead of bytecode. + self.except_bytecode_offset = -1 + self.except_end_bytecode_offset = -1 + + def is_line_in_try_block(self, line): + return self.try_line <= line < self.except_line + + def is_line_in_except_block(self, line): + return self.except_line <= line <= self.except_end_line + + def __str__(self): + lst = [ + '{try:', + str(self.try_line), + ' except ', + str(self.except_line), + ' end block ', + str(self.except_end_line), + ] + if self.raise_lines_in_except: + lst.append(' raises: %s' % (', '.join(str(x) for x in self.raise_lines_in_except),)) + + lst.append('}') + return ''.join(lst) + + __repr__ = __str__ + + +class ReturnInfo(object): + + def __init__(self, return_line): + self.return_line = return_line + + def __str__(self): + return '{return: %s}' % (self.return_line,) + + __repr__ = __str__ + + +def _get_line(op_offset_to_line, op_offset, firstlineno, search=False): + op_offset_original = op_offset + while op_offset >= 0: + ret = op_offset_to_line.get(op_offset) + if ret is not None: + return ret - firstlineno + if not search: + return ret + else: + op_offset -= 1 + raise AssertionError('Unable to find line for offset: %s.Info: %s' % ( + op_offset_original, op_offset_to_line)) + + +def debug(s): + pass + + +_Instruction = namedtuple('_Instruction', 'opname, opcode, starts_line, argval, is_jump_target, offset, argrepr') + + +def _iter_as_bytecode_as_instructions_py2(co): + code = co.co_code + op_offset_to_line = dict(dis.findlinestarts(co)) + labels = set(dis.findlabels(code)) + bytecode_len = len(code) + i = 0 + extended_arg = 0 + free = None + + op_to_name = opname + + while i < bytecode_len: + c = code[i] + op = ord(c) + is_jump_target = i in labels + + curr_op_name = op_to_name[op] + initial_bytecode_offset = i + + i = i + 1 + if op < HAVE_ARGUMENT: + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), None, is_jump_target, initial_bytecode_offset, '') + + else: + oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg + + extended_arg = 0 + i = i + 2 + if op == EXTENDED_ARG: + extended_arg = oparg * 65536 + + if op in hasconst: + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_consts[oparg], is_jump_target, 
initial_bytecode_offset, repr(co.co_consts[oparg])) + elif op in hasname: + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_names[oparg], is_jump_target, initial_bytecode_offset, str(co.co_names[oparg])) + elif op in hasjrel: + argval = i + oparg + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), argval, is_jump_target, initial_bytecode_offset, "to " + repr(argval)) + elif op in haslocal: + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_varnames[oparg], is_jump_target, initial_bytecode_offset, str(co.co_varnames[oparg])) + elif op in hascompare: + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), cmp_op[oparg], is_jump_target, initial_bytecode_offset, cmp_op[oparg]) + elif op in hasfree: + if free is None: + free = co.co_cellvars + co.co_freevars + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), free[oparg], is_jump_target, initial_bytecode_offset, str(free[oparg])) + else: + yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), oparg, is_jump_target, initial_bytecode_offset, str(oparg)) + + +def iter_instructions(co): + if sys.version_info[0] < 3: + iter_in = _iter_as_bytecode_as_instructions_py2(co) + else: + iter_in = dis.Bytecode(co) + iter_in = list(iter_in) + + bytecode_to_instruction = {} + for instruction in iter_in: + bytecode_to_instruction[instruction.offset] = instruction + + if iter_in: + for instruction in iter_in: + yield instruction + + +def collect_return_info(co, use_func_first_line=False): + if not hasattr(co, 'co_lnotab'): + return [] + + if use_func_first_line: + firstlineno = co.co_firstlineno + else: + firstlineno = 0 + + lst = [] + op_offset_to_line = dict(dis.findlinestarts(co)) + for instruction in iter_instructions(co): + curr_op_name = instruction.opname + if curr_op_name == 'RETURN_VALUE': + lst.append(ReturnInfo(_get_line(op_offset_to_line, instruction.offset, firstlineno, search=True))) + + return lst + + +if sys.version_info[:2] <= (3, 9): + + class _TargetInfo(object): + + def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None): + self.except_end_instruction = except_end_instruction + self.jump_if_not_exc_instruction = jump_if_not_exc_instruction + + def __str__(self): + msg = ['_TargetInfo('] + msg.append(self.except_end_instruction.opname) + if self.jump_if_not_exc_instruction: + msg.append(' - ') + msg.append(self.jump_if_not_exc_instruction.opname) + msg.append('(') + msg.append(str(self.jump_if_not_exc_instruction.argval)) + msg.append(')') + msg.append(')') + return ''.join(msg) + + def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx): + next_3 = [j_instruction.opname for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]] + # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]]) + if next_3 == ['POP_TOP', 'POP_TOP', 'POP_TOP']: # try..except without checking exception. 
+ try: + jump_instruction = instructions[exception_end_instruction_index - 1] + if jump_instruction.opname not in ('JUMP_FORWARD', 'JUMP_ABSOLUTE'): + return None + except IndexError: + pass + + if jump_instruction.opname == 'JUMP_ABSOLUTE': + # On latest versions of Python 3 the interpreter has a go-backwards step, + # used to show the initial line of a for/while, etc (which is this + # JUMP_ABSOLUTE)... we're not really interested in it, but rather on where + # it points to. + except_end_instruction = instructions[offset_to_instruction_idx[jump_instruction.argval]] + idx = offset_to_instruction_idx[except_end_instruction.argval] + # Search for the POP_EXCEPT which should be at the end of the block. + for pop_except_instruction in reversed(instructions[:idx]): + if pop_except_instruction.opname == 'POP_EXCEPT': + except_end_instruction = pop_except_instruction + return _TargetInfo(except_end_instruction) + else: + return None # i.e.: Continue outer loop + + else: + # JUMP_FORWARD + i = offset_to_instruction_idx[jump_instruction.argval] + try: + # i.e.: the jump is to the instruction after the block finishes (so, we need to + # get the previous instruction as that should be the place where the exception + # block finishes). + except_end_instruction = instructions[i - 1] + except: + pydev_log.critical('Error when computing try..except block end.') + return None + return _TargetInfo(except_end_instruction) + + elif next_3 and next_3[0] == 'DUP_TOP': # try..except AssertionError. + iter_in = instructions[exception_end_instruction_index + 1:] + for j, jump_if_not_exc_instruction in enumerate(iter_in): + if jump_if_not_exc_instruction.opname == 'JUMP_IF_NOT_EXC_MATCH': + # Python 3.9 + except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]] + return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction) + + elif jump_if_not_exc_instruction.opname == 'COMPARE_OP' and jump_if_not_exc_instruction.argval == 'exception match': + # Python 3.8 and before + try: + next_instruction = iter_in[j + 1] + except: + continue + if next_instruction.opname == 'POP_JUMP_IF_FALSE': + except_end_instruction = instructions[offset_to_instruction_idx[next_instruction.argval]] + return _TargetInfo(except_end_instruction, next_instruction) + else: + return None # i.e.: Continue outer loop + + else: + # i.e.: we're not interested in try..finally statements, only try..except. + return None + + def collect_try_except_info(co, use_func_first_line=False): + # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9 + if not hasattr(co, 'co_lnotab'): + return [] + + if use_func_first_line: + firstlineno = co.co_firstlineno + else: + firstlineno = 0 + + try_except_info_lst = [] + + op_offset_to_line = dict(dis.findlinestarts(co)) + + offset_to_instruction_idx = {} + + instructions = list(iter_instructions(co)) + + for i, instruction in enumerate(instructions): + offset_to_instruction_idx[instruction.offset] = i + + for i, instruction in enumerate(instructions): + curr_op_name = instruction.opname + if curr_op_name in ('SETUP_FINALLY', 'SETUP_EXCEPT'): # SETUP_EXCEPT before Python 3.8, SETUP_FINALLY Python 3.8 onwards. 
+ exception_end_instruction_index = offset_to_instruction_idx[instruction.argval] + + jump_instruction = instructions[exception_end_instruction_index - 1] + if jump_instruction.opname not in ('JUMP_FORWARD', 'JUMP_ABSOLUTE'): + continue + + except_end_instruction = None + indexes_checked = set() + indexes_checked.add(exception_end_instruction_index) + target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx) + while target_info is not None: + # Handle a try..except..except..except. + jump_instruction = target_info.jump_if_not_exc_instruction + except_end_instruction = target_info.except_end_instruction + + if jump_instruction is not None: + check_index = offset_to_instruction_idx[jump_instruction.argval] + if check_index in indexes_checked: + break + indexes_checked.add(check_index) + target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx) + else: + break + + if except_end_instruction is not None: + try_except_info = TryExceptInfo( + _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True), + ignore=False + ) + try_except_info.except_bytecode_offset = instruction.argval + try_except_info.except_line = _get_line( + op_offset_to_line, + try_except_info.except_bytecode_offset, + firstlineno, + search=True + ) + + try_except_info.except_end_bytecode_offset = except_end_instruction.offset + try_except_info.except_end_line = _get_line(op_offset_to_line, except_end_instruction.offset, firstlineno, search=True) + try_except_info_lst.append(try_except_info) + + for raise_instruction in instructions[i:offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]: + if raise_instruction.opname == 'RAISE_VARARGS': + if raise_instruction.argval == 0: + try_except_info.raise_lines_in_except.append( + _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True)) + + return try_except_info_lst + +elif sys.version_info[:2] == (3, 10): + + class _TargetInfo(object): + + def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None): + self.except_end_instruction = except_end_instruction + self.jump_if_not_exc_instruction = jump_if_not_exc_instruction + + def __str__(self): + msg = ['_TargetInfo('] + msg.append(self.except_end_instruction.opname) + if self.jump_if_not_exc_instruction: + msg.append(' - ') + msg.append(self.jump_if_not_exc_instruction.opname) + msg.append('(') + msg.append(str(self.jump_if_not_exc_instruction.argval)) + msg.append(')') + msg.append(')') + return ''.join(msg) + + def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx): + next_3 = [j_instruction.opname for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]] + # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]]) + if next_3 == ['POP_TOP', 'POP_TOP', 'POP_TOP']: # try..except without checking exception. + # Previously there was a jump which was able to point where the exception would end. This + # is no longer true, now a bare except doesn't really have any indication in the bytecode + # where the end would be expected if the exception wasn't raised, so, we just blindly + # search for a POP_EXCEPT from the current position. 
+ for pop_except_instruction in instructions[exception_end_instruction_index + 3:]: + if pop_except_instruction.opname == 'POP_EXCEPT': + except_end_instruction = pop_except_instruction + return _TargetInfo(except_end_instruction) + + elif next_3 and next_3[0] == 'DUP_TOP': # try..except AssertionError. + iter_in = instructions[exception_end_instruction_index + 1:] + for jump_if_not_exc_instruction in iter_in: + if jump_if_not_exc_instruction.opname == 'JUMP_IF_NOT_EXC_MATCH': + # Python 3.9 + except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]] + return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction) + else: + return None # i.e.: Continue outer loop + + else: + # i.e.: we're not interested in try..finally statements, only try..except. + return None + + def collect_try_except_info(co, use_func_first_line=False): + # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9 + if not hasattr(co, 'co_lnotab'): + return [] + + if use_func_first_line: + firstlineno = co.co_firstlineno + else: + firstlineno = 0 + + try_except_info_lst = [] + + op_offset_to_line = dict(dis.findlinestarts(co)) + + offset_to_instruction_idx = {} + + instructions = list(iter_instructions(co)) + + for i, instruction in enumerate(instructions): + offset_to_instruction_idx[instruction.offset] = i + + for i, instruction in enumerate(instructions): + curr_op_name = instruction.opname + if curr_op_name == 'SETUP_FINALLY': + exception_end_instruction_index = offset_to_instruction_idx[instruction.argval] + + jump_instruction = instructions[exception_end_instruction_index] + if jump_instruction.opname != 'DUP_TOP': + continue + + except_end_instruction = None + indexes_checked = set() + indexes_checked.add(exception_end_instruction_index) + target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx) + while target_info is not None: + # Handle a try..except..except..except. 
+ jump_instruction = target_info.jump_if_not_exc_instruction + except_end_instruction = target_info.except_end_instruction + + if jump_instruction is not None: + check_index = offset_to_instruction_idx[jump_instruction.argval] + if check_index in indexes_checked: + break + indexes_checked.add(check_index) + target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx) + else: + break + + if except_end_instruction is not None: + try_except_info = TryExceptInfo( + _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True), + ignore=False + ) + try_except_info.except_bytecode_offset = instruction.argval + try_except_info.except_line = _get_line( + op_offset_to_line, + try_except_info.except_bytecode_offset, + firstlineno, + search=True + ) + + try_except_info.except_end_bytecode_offset = except_end_instruction.offset + + # On Python 3.10 the final line of the except end isn't really correct, rather, + # it's engineered to be the same line of the except and not the end line of the + # block, so, the approach taken is to search for the biggest line between the + # except and the end instruction + except_end_line = -1 + start_i = offset_to_instruction_idx[try_except_info.except_bytecode_offset] + end_i = offset_to_instruction_idx[except_end_instruction.offset] + for instruction in instructions[start_i: end_i + 1]: + found_at_line = op_offset_to_line.get(instruction.offset) + if found_at_line is not None and found_at_line > except_end_line: + except_end_line = found_at_line + try_except_info.except_end_line = except_end_line - firstlineno + + try_except_info_lst.append(try_except_info) + + for raise_instruction in instructions[i:offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]: + if raise_instruction.opname == 'RAISE_VARARGS': + if raise_instruction.argval == 0: + try_except_info.raise_lines_in_except.append( + _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True)) + + return try_except_info_lst + +elif sys.version_info[:2] >= (3, 11): + + def collect_try_except_info(co, use_func_first_line=False): + ''' + Note: if the filename is available and we can get the source, + `collect_try_except_info_from_source` is preferred (this is kept as + a fallback for cases where sources aren't available). 
+ ''' + return [] + +import ast as ast_module + + +class _Visitor(ast_module.NodeVisitor): + + def __init__(self): + self.try_except_infos = [] + self._stack = [] + self._in_except_stack = [] + self.max_line = -1 + + def generic_visit(self, node): + if hasattr(node, 'lineno'): + if node.lineno > self.max_line: + self.max_line = node.lineno + return ast_module.NodeVisitor.generic_visit(self, node) + + def visit_Try(self, node): + info = TryExceptInfo(node.lineno, ignore=True) + self._stack.append(info) + self.generic_visit(node) + assert info is self._stack.pop() + if not info.ignore: + self.try_except_infos.insert(0, info) + + if sys.version_info[0] < 3: + visit_TryExcept = visit_Try + + def visit_ExceptHandler(self, node): + info = self._stack[-1] + info.ignore = False + if info.except_line == -1: + info.except_line = node.lineno + self._in_except_stack.append(info) + self.generic_visit(node) + if hasattr(node, 'end_lineno'): + info.except_end_line = node.end_lineno + else: + info.except_end_line = self.max_line + self._in_except_stack.pop() + + if sys.version_info[0] >= 3: + + def visit_Raise(self, node): + for info in self._in_except_stack: + if node.exc is None: + info.raise_lines_in_except.append(node.lineno) + self.generic_visit(node) + + else: + + def visit_Raise(self, node): + for info in self._in_except_stack: + if node.type is None and node.tback is None: + info.raise_lines_in_except.append(node.lineno) + self.generic_visit(node) + + +def collect_try_except_info_from_source(filename): + with open(filename, 'rb') as stream: + contents = stream.read() + return collect_try_except_info_from_contents(contents, filename) + + +def collect_try_except_info_from_contents(contents, filename=''): + ast = ast_module.parse(contents, filename) + visitor = _Visitor() + visitor.visit(ast) + return visitor.try_except_infos + + +RESTART_FROM_LOOKAHEAD = object() +SEPARATOR = object() + + +class _MsgPart(object): + + def __init__(self, line, tok): + assert line >= 0 + self.line = line + self.tok = tok + + @classmethod + def add_to_line_to_contents(cls, obj, line_to_contents, line=None): + if isinstance(obj, (list, tuple)): + for o in obj: + cls.add_to_line_to_contents(o, line_to_contents, line=line) + return + + if isinstance(obj, str): + assert line is not None + line = int(line) + lst = line_to_contents.setdefault(line, []) + lst.append(obj) + return + + if isinstance(obj, _MsgPart): + if isinstance(obj.tok, (list, tuple)): + cls.add_to_line_to_contents(obj.tok, line_to_contents, line=obj.line) + return + + if isinstance(obj.tok, str): + lst = line_to_contents.setdefault(obj.line, []) + lst.append(obj.tok) + return + + raise AssertionError("Unhandled: %" % (obj,)) + + +class _Disassembler(object): + + def __init__(self, co, firstlineno, level=0): + self.co = co + self.firstlineno = firstlineno + self.level = level + self.instructions = list(iter_instructions(co)) + op_offset_to_line = self.op_offset_to_line = dict(dis.findlinestarts(co)) + + # Update offsets so that all offsets have the line index (and update it based on + # the passed firstlineno). 
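For the source-based path, the essence of the `_Visitor` above can be sketched in a few self-contained lines: record each `ast.Try`, the line of its first handler, and any bare `raise` found inside a handler (the helper name `summarize_try_excepts` is illustrative only, not part of pydevd):

    import ast

    def summarize_try_excepts(source):
        # (try_line, first_except_line, bare_raise_lines) for each try..except found.
        summaries = []
        for node in ast.walk(ast.parse(source)):
            if isinstance(node, ast.Try) and node.handlers:
                bare_raises = []
                for handler in node.handlers:
                    for sub in ast.walk(handler):
                        if isinstance(sub, ast.Raise) and sub.exc is None:
                            bare_raises.append(sub.lineno)
                summaries.append((node.lineno, node.handlers[0].lineno, bare_raises))
        return summaries

    sample = '''\
    try:
        foo()
    except ValueError:
        raise
    '''
    print(summarize_try_excepts(sample))  # [(1, 3, [4])]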
+ line_index = co.co_firstlineno - firstlineno + for instruction in self.instructions: + new_line_index = op_offset_to_line.get(instruction.offset) + if new_line_index is not None: + line_index = new_line_index - firstlineno + op_offset_to_line[instruction.offset] = line_index + else: + op_offset_to_line[instruction.offset] = line_index + + BIG_LINE_INT = 9999999 + SMALL_LINE_INT = -1 + + def min_line(self, *args): + m = self.BIG_LINE_INT + for arg in args: + if isinstance(arg, (list, tuple)): + m = min(m, self.min_line(*arg)) + + elif isinstance(arg, _MsgPart): + m = min(m, arg.line) + + elif hasattr(arg, 'offset'): + m = min(m, self.op_offset_to_line[arg.offset]) + return m + + def max_line(self, *args): + m = self.SMALL_LINE_INT + for arg in args: + if isinstance(arg, (list, tuple)): + m = max(m, self.max_line(*arg)) + + elif isinstance(arg, _MsgPart): + m = max(m, arg.line) + + elif hasattr(arg, 'offset'): + m = max(m, self.op_offset_to_line[arg.offset]) + return m + + def _lookahead(self): + ''' + This handles and converts some common constructs from bytecode to actual source code. + + It may change the list of instructions. + ''' + msg = self._create_msg_part + found = [] + fullrepr = None + + # Collect all the load instructions + for next_instruction in self.instructions: + if next_instruction.opname in ('LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_CONST', 'LOAD_NAME'): + found.append(next_instruction) + else: + break + + if not found: + return None + + if next_instruction.opname == 'LOAD_ATTR': + prev_instruction = found[-1] + # Remove the current LOAD_ATTR + assert self.instructions.pop(len(found)) is next_instruction + + # Add the LOAD_ATTR to the previous LOAD + self.instructions[len(found) - 1] = _Instruction( + prev_instruction.opname, + prev_instruction.opcode, + prev_instruction.starts_line, + prev_instruction.argval, + False, # prev_instruction.is_jump_target, + prev_instruction.offset, + ( + msg(prev_instruction), + msg(prev_instruction, '.'), + msg(next_instruction) + ), + ) + return RESTART_FROM_LOOKAHEAD + + if next_instruction.opname in ('CALL_FUNCTION', 'PRECALL'): + if len(found) == next_instruction.argval + 1: + force_restart = False + delta = 0 + else: + force_restart = True + if len(found) > next_instruction.argval + 1: + delta = len(found) - (next_instruction.argval + 1) + else: + return None # This is odd + + del_upto = delta + next_instruction.argval + 2 # +2 = NAME / CALL_FUNCTION + if next_instruction.opname == 'PRECALL': + del_upto += 1 # Also remove the CALL right after the PRECALL. 
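What `_lookahead` folds back into source-like text is easiest to see by disassembling a call expression directly; the opcodes differ by version (CALL_FUNCTION on 3.8-3.10, PUSH_NULL/PRECALL/CALL on 3.11+), which is why the code above checks for both CALL_FUNCTION and PRECALL:

    import dis

    for instr in dis.get_instructions(compile('f(a, b)', '<example>', 'eval')):
        print(instr.opname, instr.argrepr)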
+ del self.instructions[delta:del_upto] + + found = iter(found[delta:]) + call_func = next(found) + args = list(found) + fullrepr = [ + msg(call_func), + msg(call_func, '('), + ] + prev = call_func + for i, arg in enumerate(args): + if i > 0: + fullrepr.append(msg(prev, ', ')) + prev = arg + fullrepr.append(msg(arg)) + + fullrepr.append(msg(prev, ')')) + + if force_restart: + self.instructions.insert(delta, _Instruction( + call_func.opname, + call_func.opcode, + call_func.starts_line, + call_func.argval, + False, # call_func.is_jump_target, + call_func.offset, + tuple(fullrepr), + )) + return RESTART_FROM_LOOKAHEAD + + elif next_instruction.opname == 'BUILD_TUPLE': + + if len(found) == next_instruction.argval: + force_restart = False + delta = 0 + else: + force_restart = True + if len(found) > next_instruction.argval: + delta = len(found) - (next_instruction.argval) + else: + return None # This is odd + + del self.instructions[delta:delta + next_instruction.argval + 1] # +1 = BUILD_TUPLE + + found = iter(found[delta:]) + + args = [instruction for instruction in found] + if args: + first_instruction = args[0] + else: + first_instruction = next_instruction + prev = first_instruction + + fullrepr = [] + fullrepr.append(msg(prev, '(')) + for i, arg in enumerate(args): + if i > 0: + fullrepr.append(msg(prev, ', ')) + prev = arg + fullrepr.append(msg(arg)) + + fullrepr.append(msg(prev, ')')) + + if force_restart: + self.instructions.insert(delta, _Instruction( + first_instruction.opname, + first_instruction.opcode, + first_instruction.starts_line, + first_instruction.argval, + False, # first_instruction.is_jump_target, + first_instruction.offset, + tuple(fullrepr), + )) + return RESTART_FROM_LOOKAHEAD + + if fullrepr is not None and self.instructions: + if self.instructions[0].opname == 'POP_TOP': + self.instructions.pop(0) + + if self.instructions[0].opname in ('STORE_FAST', 'STORE_NAME'): + next_instruction = self.instructions.pop(0) + return msg(next_instruction), msg(next_instruction, ' = '), fullrepr + + if self.instructions[0].opname == 'RETURN_VALUE': + next_instruction = self.instructions.pop(0) + return msg(next_instruction, 'return ', line=self.min_line(next_instruction, fullrepr)), fullrepr + + return fullrepr + + def _decorate_jump_target(self, instruction, instruction_repr): + if instruction.is_jump_target: + return ('|', str(instruction.offset), '|', instruction_repr) + + return instruction_repr + + def _create_msg_part(self, instruction, tok=None, line=None): + dec = self._decorate_jump_target + if line is None or line in (self.BIG_LINE_INT, self.SMALL_LINE_INT): + line = self.op_offset_to_line[instruction.offset] + + argrepr = instruction.argrepr + if isinstance(argrepr, str) and argrepr.startswith('NULL + '): + argrepr = argrepr[7:] + return _MsgPart( + line, tok if tok is not None else dec(instruction, argrepr)) + + def _next_instruction_to_str(self, line_to_contents): + # indent = '' + # if self.level > 0: + # indent += ' ' * self.level + # print(indent, 'handle', self.instructions[0]) + + if self.instructions: + ret = self._lookahead() + if ret: + return ret + + msg = self._create_msg_part + + instruction = self.instructions.pop(0) + + if instruction.opname in 'RESUME': + return None + + if instruction.opname in ('LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_CONST', 'LOAD_NAME'): + next_instruction = self.instructions[0] + if next_instruction.opname in ('STORE_FAST', 'STORE_NAME'): + self.instructions.pop(0) + return ( + msg(next_instruction), + msg(next_instruction, ' = '), + 
msg(instruction)) + + if next_instruction.opname == 'RETURN_VALUE': + self.instructions.pop(0) + return (msg(instruction, 'return ', line=self.min_line(instruction)), msg(instruction)) + + if next_instruction.opname == 'RAISE_VARARGS' and next_instruction.argval == 1: + self.instructions.pop(0) + return (msg(instruction, 'raise ', line=self.min_line(instruction)), msg(instruction)) + + if instruction.opname == 'LOAD_CONST': + if inspect.iscode(instruction.argval): + + code_line_to_contents = _Disassembler( + instruction.argval, self.firstlineno, self.level + 1 + ).build_line_to_contents() + + for contents in code_line_to_contents.values(): + contents.insert(0, ' ') + for line, contents in code_line_to_contents.items(): + line_to_contents.setdefault(line, []).extend(contents) + return msg(instruction, 'LOAD_CONST(code)') + + if instruction.opname == 'RAISE_VARARGS': + if instruction.argval == 0: + return msg(instruction, 'raise') + + if instruction.opname == 'SETUP_FINALLY': + return msg(instruction, ('try(', instruction.argrepr, '):')) + + if instruction.argrepr: + return msg(instruction, (instruction.opname, '(', instruction.argrepr, ')')) + + if instruction.argval: + return msg(instruction, '%s{%s}' % (instruction.opname, instruction.argval,)) + + return msg(instruction, instruction.opname) + + def build_line_to_contents(self): + # print('----') + # for instruction in self.instructions: + # print(instruction) + # print('----\n\n') + + line_to_contents = {} + + instructions = self.instructions + while instructions: + s = self._next_instruction_to_str(line_to_contents) + if s is RESTART_FROM_LOOKAHEAD: + continue + if s is None: + continue + + _MsgPart.add_to_line_to_contents(s, line_to_contents) + m = self.max_line(s) + if m != self.SMALL_LINE_INT: + line_to_contents.setdefault(m, []).append(SEPARATOR) + return line_to_contents + + def disassemble(self): + line_to_contents = self.build_line_to_contents() + stream = StringIO() + last_line = 0 + show_lines = False + for line, contents in sorted(line_to_contents.items()): + while last_line < line - 1: + if show_lines: + stream.write('%s.\n' % (last_line + 1,)) + else: + stream.write('\n') + last_line += 1 + + if show_lines: + stream.write('%s. ' % (line,)) + + for i, content in enumerate(contents): + if content == SEPARATOR: + if i != len(contents) - 1: + stream.write(', ') + else: + stream.write(content) + + stream.write('\n') + + last_line = line + + return stream.getvalue() + + +def code_to_bytecode_representation(co, use_func_first_line=False): + ''' + A simple disassemble of bytecode. + + It does not attempt to provide the full Python source code, rather, it provides a low-level + representation of the bytecode, respecting the lines (so, its target is making the bytecode + easier to grasp and not providing the original source code). + + Note that it does show jump locations/targets and converts some common bytecode constructs to + Python code to make it a bit easier to understand. 
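The core idea of `build_line_to_contents`/`disassemble` is to bucket instructions by the source line they originate from. A minimal, self-contained version of that bookkeeping, using only the stdlib and a placeholder `_sample` function:

    import dis

    def _sample(a, b):
        c = a + b
        return c

    def per_line_opnames(code):
        # Mirror the op_offset_to_line bookkeeping above: every instruction is
        # attributed to the most recent line start at or before its offset.
        offset_to_line = dict(dis.findlinestarts(code))
        line = code.co_firstlineno
        line_to_ops = {}
        for instr in dis.get_instructions(code):
            new_line = offset_to_line.get(instr.offset)
            if new_line is not None:
                line = new_line
            line_to_ops.setdefault(line, []).append(instr.opname)
        return line_to_ops

    for line, ops in sorted(per_line_opnames(_sample.__code__).items()):
        print('%s. %s' % (line, ', '.join(ops)))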
+ ''' + # Reference for bytecodes: + # https://docs.python.org/3/library/dis.html + if use_func_first_line: + firstlineno = co.co_firstlineno + else: + firstlineno = 0 + + return _Disassembler(co, firstlineno).disassemble() diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm.py new file mode 100644 index 0000000..930adda --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm.py @@ -0,0 +1,1822 @@ +''' pydevd - a debugging daemon +This is the daemon you launch for python remote debugging. + +Protocol: +each command has a format: + id\tsequence-num\ttext + id: protocol command number + sequence-num: each request has a sequence number. Sequence numbers + originating at the debugger are odd, sequence numbers originating + at the daemon are even. Every response uses the same sequence number + as the request. + payload: it is protocol dependent. When response is a complex structure, it + is returned as XML. Each attribute value is urlencoded, and then the whole + payload is urlencoded again to prevent stray characters corrupting protocol/xml encodings + + Commands: + + NUMBER NAME FROM* ARGUMENTS RESPONSE NOTE +100 series: program execution + 101 RUN JAVA - - + 102 LIST_THREADS JAVA RETURN with XML listing of all threads + 103 THREAD_CREATE PYDB - XML with thread information + 104 THREAD_KILL JAVA id (or * to exit) kills the thread + PYDB id nofies JAVA that thread was killed + 105 THREAD_SUSPEND JAVA XML of the stack, suspends the thread + reason for suspension + PYDB id notifies JAVA that thread was suspended + + 106 CMD_THREAD_RUN JAVA id resume the thread + PYDB id \t reason notifies JAVA that thread was resumed + + 107 STEP_INTO JAVA thread_id + 108 STEP_OVER JAVA thread_id + 109 STEP_RETURN JAVA thread_id + + 110 GET_VARIABLE JAVA thread_id \t frame_id \t GET_VARIABLE with XML of var content + FRAME|GLOBAL \t attributes* + + 111 SET_BREAK JAVA file/line of the breakpoint + 112 REMOVE_BREAK JAVA file/line of the return + 113 CMD_EVALUATE_EXPRESSION JAVA expression result of evaluating the expression + 114 CMD_GET_FRAME JAVA request for frame contents + 115 CMD_EXEC_EXPRESSION JAVA + 116 CMD_WRITE_TO_CONSOLE PYDB + 117 CMD_CHANGE_VARIABLE + 118 CMD_RUN_TO_LINE + 119 CMD_RELOAD_CODE + 120 CMD_GET_COMPLETIONS JAVA + + 200 CMD_REDIRECT_OUTPUT JAVA streams to redirect as string - + 'STDOUT' (redirect only STDOUT) + 'STDERR' (redirect only STDERR) + 'STDOUT STDERR' (redirect both streams) + +500 series diagnostics/ok + 501 VERSION either Version string (1.0) Currently just used at startup + 502 RETURN either Depends on caller - + +900 series: errors + 901 ERROR either - This is reserved for unexpected errors. 
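In other words, a command on the wire is just the three tab-separated fields described above, terminated by a newline. A minimal round trip as a sketch (the quote_plus/unquote_plus here only mirrors the urlencoding note above; which fields are actually quoted varies by command):

    from urllib.parse import quote_plus, unquote_plus

    CMD_VERSION = 501  # from the table above

    def make_command(cmd_id, seq, payload):
        return '%s\t%s\t%s\n' % (cmd_id, seq, quote_plus(payload))

    def parse_command(line):
        cmd_id, seq, payload = line.rstrip('\n').split('\t', 2)
        return int(cmd_id), int(seq), unquote_plus(payload)

    print(parse_command(make_command(CMD_VERSION, 1, 'local')))  # (501, 1, 'local')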
+ + * JAVA - remote debugger, the java end + * PYDB - pydevd, the python end +''' + +import linecache +import os + +from _pydev_bundle.pydev_imports import _queue +from _pydev_bundle._pydev_saved_modules import time +from _pydev_bundle._pydev_saved_modules import threading +from _pydev_bundle._pydev_saved_modules import socket as socket_module +from _pydevd_bundle.pydevd_constants import (DebugInfoHolder, IS_WINDOWS, IS_JYTHON, + IS_PY36_OR_GREATER, STATE_RUN, ASYNC_EVAL_TIMEOUT_SEC, + get_global_debugger, GetGlobalDebugger, set_global_debugger, # Keep for backward compatibility @UnusedImport + silence_warnings_decorator, filter_all_warnings) +from _pydev_bundle.pydev_override import overrides +import weakref +from _pydev_bundle._pydev_completer import extract_token_and_qualifier +from _pydevd_bundle._debug_adapter.pydevd_schema import VariablesResponseBody, \ + SetVariableResponseBody, StepInTarget, StepInTargetsResponseBody +from _pydevd_bundle._debug_adapter import pydevd_base_schema, pydevd_schema +from _pydevd_bundle.pydevd_net_command import NetCommand +from _pydevd_bundle.pydevd_xml import ExceptionOnEvaluate +from _pydevd_bundle.pydevd_constants import ForkSafeLock, NULL +from _pydevd_bundle.pydevd_daemon_thread import PyDBDaemonThread +from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id, resume_threads +from _pydevd_bundle.pydevd_dont_trace_files import PYDEV_FILE +import dis +from _pydevd_bundle.pydevd_frame_utils import create_frames_list_from_exception_cause +import pydevd_file_utils +import itertools +from urllib.parse import quote_plus, unquote_plus +import pydevconsole +from _pydevd_bundle import pydevd_vars, pydevd_io, pydevd_reload +from _pydevd_bundle import pydevd_bytecode_utils +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle import pydevd_vm_type +import sys +import traceback +from _pydevd_bundle.pydevd_utils import quote_smart as quote, compare_object_attrs_key, \ + notify_about_gevent_if_needed, isinstance_checked, ScopeRequest, getattr_checked, Timer +from _pydev_bundle import pydev_log, fsnotify +from _pydev_bundle.pydev_log import exception as pydev_log_exception +from _pydev_bundle import _pydev_completer + +from pydevd_tracing import get_exception_traceback_str +from _pydevd_bundle import pydevd_console +from _pydev_bundle.pydev_monkey import disable_trace_thread_modules, enable_trace_thread_modules +from io import StringIO + +# CMD_XXX constants imported for backward compatibility +from _pydevd_bundle.pydevd_comm_constants import * # @UnusedWildImport + +# Socket import aliases: +AF_INET, SOCK_STREAM, SHUT_WR, SOL_SOCKET, SO_REUSEADDR, IPPROTO_TCP, socket = ( + socket_module.AF_INET, + socket_module.SOCK_STREAM, + socket_module.SHUT_WR, + socket_module.SOL_SOCKET, + socket_module.SO_REUSEADDR, + socket_module.IPPROTO_TCP, + socket_module.socket, +) + +if IS_WINDOWS and not IS_JYTHON: + SO_EXCLUSIVEADDRUSE = socket_module.SO_EXCLUSIVEADDRUSE + + +class ReaderThread(PyDBDaemonThread): + ''' reader thread reads and dispatches commands in an infinite loop ''' + + def __init__(self, sock, py_db, PyDevJsonCommandProcessor, process_net_command, terminate_on_socket_close=True): + assert sock is not None + PyDBDaemonThread.__init__(self, py_db) + self.__terminate_on_socket_close = terminate_on_socket_close + + self.sock = sock + self._buffer = b'' + self.name = "pydevd.Reader" + self.process_net_command = process_net_command + self.process_net_command_json = PyDevJsonCommandProcessor(self._from_json).process_net_command_json + + def 
_from_json(self, json_msg, update_ids_from_dap=False): + return pydevd_base_schema.from_json(json_msg, update_ids_from_dap, on_dict_loaded=self._on_dict_loaded) + + def _on_dict_loaded(self, dct): + for listener in self.py_db.dap_messages_listeners: + listener.after_receive(dct) + + @overrides(PyDBDaemonThread.do_kill_pydev_thread) + def do_kill_pydev_thread(self): + PyDBDaemonThread.do_kill_pydev_thread(self) + # Note that we no longer shutdown the reader, just the writer. The idea is that we shutdown + # the writer to send that the communication has finished, then, the client will shutdown its + # own writer when it receives an empty read, at which point this reader will also shutdown. + + # That way, we can *almost* guarantee that all messages have been properly sent -- it's not + # completely guaranteed because it's possible that the process exits before the whole + # message was sent as having this thread alive won't stop the process from exiting -- we + # have a timeout when exiting the process waiting for this thread to finish -- see: + # PyDB.dispose_and_kill_all_pydevd_threads()). + + # try: + # self.sock.shutdown(SHUT_RD) + # except: + # pass + # try: + # self.sock.close() + # except: + # pass + + def _read(self, size): + while True: + buffer_len = len(self._buffer) + if buffer_len == size: + ret = self._buffer + self._buffer = b'' + return ret + + if buffer_len > size: + ret = self._buffer[:size] + self._buffer = self._buffer[size:] + return ret + + try: + r = self.sock.recv(max(size - buffer_len, 1024)) + except OSError: + return b'' + if not r: + return b'' + self._buffer += r + + def _read_line(self): + while True: + i = self._buffer.find(b'\n') + if i != -1: + i += 1 # Add the newline to the return + ret = self._buffer[:i] + self._buffer = self._buffer[i:] + return ret + else: + try: + r = self.sock.recv(1024) + except OSError: + return b'' + if not r: + return b'' + self._buffer += r + + @overrides(PyDBDaemonThread._on_run) + def _on_run(self): + try: + content_len = -1 + + while True: + # i.e.: even if we received a kill, we should only exit the ReaderThread when the + # client itself closes the connection (although on kill received we stop actually + # processing anything read). + try: + notify_about_gevent_if_needed() + line = self._read_line() + + if len(line) == 0: + pydev_log.debug('ReaderThread: empty contents received (len(line) == 0).') + self._terminate_on_socket_close() + return # Finished communication. + + if self._kill_received: + continue + + if line.startswith(b'Content-Length:'): + content_len = int(line.strip().split(b':', 1)[1]) + continue + + if content_len != -1: + # If we previously received a content length, read until a '\r\n'. + if line == b'\r\n': + json_contents = self._read(content_len) + + content_len = -1 + + if len(json_contents) == 0: + pydev_log.debug('ReaderThread: empty contents received (len(json_contents) == 0).') + self._terminate_on_socket_close() + return # Finished communication. + + if self._kill_received: + continue + + # We just received a json message, let's process it. + self.process_net_command_json(self.py_db, json_contents) + + continue + else: + # No content len, regular line-based protocol message (remove trailing new-line). + if line.endswith(b'\n\n'): + line = line[:-2] + + elif line.endswith(b'\n'): + line = line[:-1] + + elif line.endswith(b'\r'): + line = line[:-1] + except: + if not self._kill_received: + pydev_log_exception() + self._terminate_on_socket_close() + return # Finished communication. 
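The JSON path above is standard DAP framing: a 'Content-Length: N' header, a blank line, then N bytes of JSON. A self-contained sketch of that framing over an in-memory buffer (not the ReaderThread implementation itself):

    def split_dap_messages(data):
        # Returns (complete JSON bodies, unconsumed remainder). Assumes a single
        # 'Content-Length' header per message, which is how pydevd frames them.
        messages = []
        while True:
            header_end = data.find(b'\r\n\r\n')
            if header_end == -1:
                return messages, data
            content_len = int(data[:header_end].split(b'Content-Length:', 1)[1].strip())
            body_start = header_end + 4
            if len(data) < body_start + content_len:
                return messages, data  # body not fully buffered yet
            messages.append(data[body_start:body_start + content_len])
            data = data[body_start + content_len:]

    print(split_dap_messages(b'Content-Length: 2\r\n\r\n{}'))  # ([b'{}'], b'')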
+ + # Note: the java backend is always expected to pass utf-8 encoded strings. We now work with str + # internally and thus, we may need to convert to the actual encoding where needed (i.e.: filenames + # on python 2 may need to be converted to the filesystem encoding). + if hasattr(line, 'decode'): + line = line.decode('utf-8') + + if DebugInfoHolder.DEBUG_RECORD_SOCKET_READS: + pydev_log.critical(u'debugger: received >>%s<<\n' % (line,)) + + args = line.split(u'\t', 2) + try: + cmd_id = int(args[0]) + pydev_log.debug('Received command: %s %s\n' % (ID_TO_MEANING.get(str(cmd_id), '???'), line,)) + self.process_command(cmd_id, int(args[1]), args[2]) + except: + if sys is not None and pydev_log_exception is not None: # Could happen at interpreter shutdown + pydev_log_exception("Can't process net command: %s.", line) + + except: + if not self._kill_received: + if sys is not None and pydev_log_exception is not None: # Could happen at interpreter shutdown + pydev_log_exception() + + self._terminate_on_socket_close() + finally: + pydev_log.debug('ReaderThread: exit') + + def _terminate_on_socket_close(self): + if self.__terminate_on_socket_close: + self.py_db.dispose_and_kill_all_pydevd_threads() + + def process_command(self, cmd_id, seq, text): + self.process_net_command(self.py_db, cmd_id, seq, text) + + +class FSNotifyThread(PyDBDaemonThread): + + def __init__(self, py_db, api, watch_dirs): + PyDBDaemonThread.__init__(self, py_db) + self.api = api + self.name = "pydevd.FSNotifyThread" + self.watcher = fsnotify.Watcher() + self.watch_dirs = watch_dirs + + @overrides(PyDBDaemonThread._on_run) + def _on_run(self): + try: + pydev_log.info('Watching directories for code reload:\n---\n%s\n---' % ('\n'.join(sorted(self.watch_dirs)))) + + # i.e.: The first call to set_tracked_paths will do a full scan, so, do it in the thread + # too (after everything is configured). 
+ self.watcher.set_tracked_paths(self.watch_dirs) + while not self._kill_received: + for change_enum, change_path in self.watcher.iter_changes(): + # We're only interested in modified events + if change_enum == fsnotify.Change.modified: + pydev_log.info('Modified: %s', change_path) + self.api.request_reload_code(self.py_db, -1, None, change_path) + else: + pydev_log.info('Ignored (add or remove) change in: %s', change_path) + except: + pydev_log.exception('Error when waiting for filesystem changes in FSNotifyThread.') + + @overrides(PyDBDaemonThread.do_kill_pydev_thread) + def do_kill_pydev_thread(self): + self.watcher.dispose() + PyDBDaemonThread.do_kill_pydev_thread(self) + + +class WriterThread(PyDBDaemonThread): + ''' writer thread writes out the commands in an infinite loop ''' + + def __init__(self, sock, py_db, terminate_on_socket_close=True): + PyDBDaemonThread.__init__(self, py_db) + self.sock = sock + self.__terminate_on_socket_close = terminate_on_socket_close + self.name = "pydevd.Writer" + self._cmd_queue = _queue.Queue() + if pydevd_vm_type.get_vm_type() == 'python': + self.timeout = 0 + else: + self.timeout = 0.1 + + def add_command(self, cmd): + ''' cmd is NetCommand ''' + if not self._kill_received: # we don't take new data after everybody die + self._cmd_queue.put(cmd, False) + + @overrides(PyDBDaemonThread._on_run) + def _on_run(self): + ''' just loop and write responses ''' + + try: + while True: + try: + try: + cmd = self._cmd_queue.get(True, 0.1) + except _queue.Empty: + if self._kill_received: + pydev_log.debug('WriterThread: kill_received (sock.shutdown(SHUT_WR))') + try: + self.sock.shutdown(SHUT_WR) + except: + pass + # Note: don't close the socket, just send the shutdown, + # then, when no data is received on the reader, it can close + # the socket. + # See: https://blog.netherlabs.nl/articles/2009/01/18/the-ultimate-so_linger-page-or-why-is-my-tcp-not-reliable + + # try: + # self.sock.close() + # except: + # pass + + return # break if queue is empty and _kill_received + else: + continue + except: + # pydev_log.info('Finishing debug communication...(1)') + # when liberating the thread here, we could have errors because we were shutting down + # but the thread was still not liberated + return + + if cmd.as_dict is not None: + for listener in self.py_db.dap_messages_listeners: + listener.before_send(cmd.as_dict) + + notify_about_gevent_if_needed() + cmd.send(self.sock) + + if cmd.id == CMD_EXIT: + pydev_log.debug('WriterThread: CMD_EXIT received') + break + if time is None: + break # interpreter shutdown + time.sleep(self.timeout) + except Exception: + if self.__terminate_on_socket_close: + self.py_db.dispose_and_kill_all_pydevd_threads() + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 0: + pydev_log_exception() + finally: + pydev_log.debug('WriterThread: exit') + + def empty(self): + return self._cmd_queue.empty() + + @overrides(PyDBDaemonThread.do_kill_pydev_thread) + def do_kill_pydev_thread(self): + if not self._kill_received: + # Add command before setting the kill flag (otherwise the command may not be added). 
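The shutdown choreography relied on here (the writer half-closes, the reader then sees an empty read) can be exercised in isolation with a socket pair:

    import socket

    a, b = socket.socketpair()
    a.sendall(b'last message')
    a.shutdown(socket.SHUT_WR)  # half-close: signal "no more data" without closing
    print(b.recv(1024))         # b'last message' (possibly split across reads)
    print(b.recv(1024))         # b'' -> the peer has finished writing
    a.close()
    b.close()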
+ exit_cmd = self.py_db.cmd_factory.make_exit_command(self.py_db) + self.add_command(exit_cmd) + + PyDBDaemonThread.do_kill_pydev_thread(self) + + +def create_server_socket(host, port): + try: + server = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP) + if IS_WINDOWS and not IS_JYTHON: + server.setsockopt(SOL_SOCKET, SO_EXCLUSIVEADDRUSE, 1) + else: + server.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) + + server.bind((host, port)) + server.settimeout(None) + except Exception: + server.close() + raise + + return server + + +def start_server(port): + ''' binds to a port, waits for the debugger to connect ''' + s = create_server_socket(host='', port=port) + + try: + s.listen(1) + new_socket, _addr = s.accept() + pydev_log.info("Connection accepted") + # closing server socket is not necessary but we don't need it + s.close() + return new_socket + except: + pydev_log.exception("Could not bind to port: %s\n", port) + raise + + +def start_client(host, port): + ''' connects to a host/port ''' + pydev_log.info("Connecting to %s:%s", host, port) + + s = socket(AF_INET, SOCK_STREAM) + + # Set TCP keepalive on an open socket. + # It activates after 1 second (TCP_KEEPIDLE,) of idleness, + # then sends a keepalive ping once every 3 seconds (TCP_KEEPINTVL), + # and closes the connection after 5 failed ping (TCP_KEEPCNT), or 15 seconds + try: + s.setsockopt(SOL_SOCKET, socket_module.SO_KEEPALIVE, 1) + except (AttributeError, OSError): + pass # May not be available everywhere. + try: + s.setsockopt(socket_module.IPPROTO_TCP, socket_module.TCP_KEEPIDLE, 1) + except (AttributeError, OSError): + pass # May not be available everywhere. + try: + s.setsockopt(socket_module.IPPROTO_TCP, socket_module.TCP_KEEPINTVL, 3) + except (AttributeError, OSError): + pass # May not be available everywhere. + try: + s.setsockopt(socket_module.IPPROTO_TCP, socket_module.TCP_KEEPCNT, 5) + except (AttributeError, OSError): + pass # May not be available everywhere. + + try: + # 10 seconds default timeout + timeout = int(os.environ.get('PYDEVD_CONNECT_TIMEOUT', 10)) + s.settimeout(timeout) + s.connect((host, port)) + s.settimeout(None) # no timeout after connected + pydev_log.info("Connected.") + return s + except: + pydev_log.exception("Could not connect to %s: %s", host, port) + raise + + +INTERNAL_TERMINATE_THREAD = 1 +INTERNAL_SUSPEND_THREAD = 2 + + +class InternalThreadCommand(object): + ''' internal commands are generated/executed by the debugger. + + The reason for their existence is that some commands have to be executed + on specific threads. These are the InternalThreadCommands that get + get posted to PyDB. 
+ ''' + + def __init__(self, thread_id, method=None, *args, **kwargs): + self.thread_id = thread_id + self.method = method + self.args = args + self.kwargs = kwargs + + def can_be_executed_by(self, thread_id): + '''By default, it must be in the same thread to be executed + ''' + return self.thread_id == thread_id or self.thread_id.endswith('|' + thread_id) + + def do_it(self, dbg): + try: + if self.method is not None: + self.method(dbg, *self.args, **self.kwargs) + else: + raise NotImplementedError("you have to override do_it") + finally: + self.args = None + self.kwargs = None + + def __str__(self): + return 'InternalThreadCommands(%s, %s, %s)' % (self.method, self.args, self.kwargs) + + __repr__ = __str__ + + +class InternalThreadCommandForAnyThread(InternalThreadCommand): + + def __init__(self, thread_id, method=None, *args, **kwargs): + assert thread_id == '*' + + InternalThreadCommand.__init__(self, thread_id, method, *args, **kwargs) + + self.executed = False + self.lock = ForkSafeLock() + + def can_be_executed_by(self, thread_id): + return True # Can be executed by any thread. + + def do_it(self, dbg): + with self.lock: + if self.executed: + return + self.executed = True + + InternalThreadCommand.do_it(self, dbg) + + +def _send_io_message(py_db, s): + cmd = py_db.cmd_factory.make_io_message(s, 2) + if py_db.writer is not None: + py_db.writer.add_command(cmd) + + +def internal_reload_code(dbg, seq, module_name, filename): + try: + found_module_to_reload = False + if module_name is not None: + module_name = module_name + if module_name not in sys.modules: + if '.' in module_name: + new_module_name = module_name.split('.')[-1] + if new_module_name in sys.modules: + module_name = new_module_name + + modules_to_reload = {} + module = sys.modules.get(module_name) + if module is not None: + modules_to_reload[id(module)] = (module, module_name) + + if filename: + filename = pydevd_file_utils.normcase(filename) + for module_name, module in sys.modules.copy().items(): + f = getattr_checked(module, '__file__') + if f is not None: + if f.endswith(('.pyc', '.pyo')): + f = f[:-1] + + if pydevd_file_utils.normcase(f) == filename: + modules_to_reload[id(module)] = (module, module_name) + + if not modules_to_reload: + if filename and module_name: + _send_io_message(dbg, 'code reload: Unable to find module %s to reload for path: %s\n' % (module_name, filename)) + elif filename: + _send_io_message(dbg, 'code reload: Unable to find module to reload for path: %s\n' % (filename,)) + elif module_name: + _send_io_message(dbg, 'code reload: Unable to find module to reload: %s\n' % (module_name,)) + + else: + # Too much info... + # _send_io_message(dbg, 'code reload: This usually means you are trying to reload the __main__ module (which cannot be reloaded).\n') + for module, module_name in modules_to_reload.values(): + _send_io_message(dbg, 'code reload: Start reloading module: "' + module_name + '" ... 
\n') + found_module_to_reload = True + + if pydevd_reload.xreload(module): + _send_io_message(dbg, 'code reload: reload finished\n') + else: + _send_io_message(dbg, 'code reload: reload finished without applying any change\n') + + cmd = dbg.cmd_factory.make_reloaded_code_message(seq, found_module_to_reload) + dbg.writer.add_command(cmd) + except: + pydev_log.exception('Error reloading code') + + +class InternalGetThreadStack(InternalThreadCommand): + ''' + This command will either wait for a given thread to be paused to get its stack or will provide + it anyways after a timeout (in which case the stack will be gotten but local variables won't + be available and it'll not be possible to interact with the frame as it's not actually + stopped in a breakpoint). + ''' + + def __init__(self, seq, thread_id, py_db, set_additional_thread_info, fmt, timeout=.5, start_frame=0, levels=0): + InternalThreadCommand.__init__(self, thread_id) + self._py_db = weakref.ref(py_db) + self._timeout = time.time() + timeout + self.seq = seq + self._cmd = None + self._fmt = fmt + self._start_frame = start_frame + self._levels = levels + + # Note: receives set_additional_thread_info to avoid a circular import + # in this module. + self._set_additional_thread_info = set_additional_thread_info + + @overrides(InternalThreadCommand.can_be_executed_by) + def can_be_executed_by(self, _thread_id): + timed_out = time.time() >= self._timeout + + py_db = self._py_db() + t = pydevd_find_thread_by_id(self.thread_id) + frame = None + if t and not getattr(t, 'pydev_do_not_trace', None): + additional_info = self._set_additional_thread_info(t) + frame = additional_info.get_topmost_frame(t) + try: + self._cmd = py_db.cmd_factory.make_get_thread_stack_message( + py_db, self.seq, self.thread_id, frame, self._fmt, must_be_suspended=not timed_out, start_frame=self._start_frame, levels=self._levels) + finally: + frame = None + t = None + + return self._cmd is not None or timed_out + + @overrides(InternalThreadCommand.do_it) + def do_it(self, dbg): + if self._cmd is not None: + dbg.writer.add_command(self._cmd) + self._cmd = None + + +def internal_step_in_thread(py_db, thread_id, cmd_id, set_additional_thread_info): + thread_to_step = pydevd_find_thread_by_id(thread_id) + if thread_to_step: + info = set_additional_thread_info(thread_to_step) + info.pydev_original_step_cmd = cmd_id + info.pydev_step_cmd = cmd_id + info.pydev_step_stop = None + info.pydev_state = STATE_RUN + + if py_db.stepping_resumes_all_threads: + resume_threads('*', except_thread=thread_to_step) + + +def internal_smart_step_into(py_db, thread_id, offset, child_offset, set_additional_thread_info): + thread_to_step = pydevd_find_thread_by_id(thread_id) + if thread_to_step: + info = set_additional_thread_info(thread_to_step) + info.pydev_original_step_cmd = CMD_SMART_STEP_INTO + info.pydev_step_cmd = CMD_SMART_STEP_INTO + info.pydev_step_stop = None + info.pydev_smart_parent_offset = int(offset) + info.pydev_smart_child_offset = int(child_offset) + info.pydev_state = STATE_RUN + + if py_db.stepping_resumes_all_threads: + resume_threads('*', except_thread=thread_to_step) + + +class InternalSetNextStatementThread(InternalThreadCommand): + + def __init__(self, thread_id, cmd_id, line, func_name, seq=0): + ''' + cmd_id may actually be one of: + + CMD_RUN_TO_LINE + CMD_SET_NEXT_STATEMENT + CMD_SMART_STEP_INTO + ''' + self.thread_id = thread_id + self.cmd_id = cmd_id + self.line = line + self.seq = seq + + self.func_name = func_name + + def do_it(self, dbg): + t = 
pydevd_find_thread_by_id(self.thread_id) + if t: + info = t.additional_info + info.pydev_original_step_cmd = self.cmd_id + info.pydev_step_cmd = self.cmd_id + info.pydev_step_stop = None + info.pydev_next_line = int(self.line) + info.pydev_func_name = self.func_name + info.pydev_message = str(self.seq) + info.pydev_smart_parent_offset = -1 + info.pydev_smart_child_offset = -1 + info.pydev_state = STATE_RUN + + +@silence_warnings_decorator +def internal_get_variable_json(py_db, request): + ''' + :param VariablesRequest request: + ''' + arguments = request.arguments # : :type arguments: VariablesArguments + variables_reference = arguments.variablesReference + scope = None + if isinstance_checked(variables_reference, ScopeRequest): + scope = variables_reference + variables_reference = variables_reference.variable_reference + + fmt = arguments.format + if hasattr(fmt, 'to_dict'): + fmt = fmt.to_dict() + + variables = [] + try: + try: + variable = py_db.suspended_frames_manager.get_variable(variables_reference) + except KeyError: + pass + else: + for child_var in variable.get_children_variables(fmt=fmt, scope=scope): + variables.append(child_var.get_var_data(fmt=fmt)) + except: + try: + exc, exc_type, tb = sys.exc_info() + err = ''.join(traceback.format_exception(exc, exc_type, tb)) + variables = [{ + 'name': '', + 'value': err, + 'type': '', + 'variablesReference': 0 + }] + except: + err = '' + pydev_log.exception(err) + variables = [] + + body = VariablesResponseBody(variables) + variables_response = pydevd_base_schema.build_response(request, kwargs={'body':body}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + + +class InternalGetVariable(InternalThreadCommand): + ''' gets the value of a variable ''' + + def __init__(self, seq, thread_id, frame_id, scope, attrs): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.scope = scope + self.attributes = attrs + + @silence_warnings_decorator + def do_it(self, dbg): + ''' Converts request into python variable ''' + try: + xml = StringIO() + xml.write("") + type_name, val_dict = pydevd_vars.resolve_compound_variable_fields( + dbg, self.thread_id, self.frame_id, self.scope, self.attributes) + if val_dict is None: + val_dict = {} + + # assume properly ordered if resolver returns 'OrderedDict' + # check type as string to support OrderedDict backport for older Python + keys = list(val_dict) + if not (type_name == "OrderedDict" or val_dict.__class__.__name__ == "OrderedDict" or IS_PY36_OR_GREATER): + keys = sorted(keys, key=compare_object_attrs_key) + + timer = Timer() + for k in keys: + val = val_dict[k] + evaluate_full_value = pydevd_xml.should_evaluate_full_value(val) + xml.write(pydevd_xml.var_to_xml(val, k, evaluate_full_value=evaluate_full_value)) + timer.report_if_compute_repr_attr_slow(self.attributes, k, type(val)) + + xml.write("") + cmd = dbg.cmd_factory.make_get_variable_message(self.sequence, xml.getvalue()) + xml.close() + dbg.writer.add_command(cmd) + except Exception: + cmd = dbg.cmd_factory.make_error_message( + self.sequence, "Error resolving variables %s" % (get_exception_traceback_str(),)) + dbg.writer.add_command(cmd) + + +class InternalGetArray(InternalThreadCommand): + + def __init__(self, seq, roffset, coffset, rows, cols, format, thread_id, frame_id, scope, attrs): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.scope = scope + self.name = attrs.split("\t")[-1] + self.attrs = attrs + self.roffset = int(roffset) + 
self.coffset = int(coffset) + self.rows = int(rows) + self.cols = int(cols) + self.format = format + + def do_it(self, dbg): + try: + frame = dbg.find_frame(self.thread_id, self.frame_id) + var = pydevd_vars.eval_in_context(self.name, frame.f_globals, frame.f_locals, py_db=dbg) + xml = pydevd_vars.table_like_struct_to_xml(var, self.name, self.roffset, self.coffset, self.rows, self.cols, self.format) + cmd = dbg.cmd_factory.make_get_array_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except: + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error resolving array: " + get_exception_traceback_str()) + dbg.writer.add_command(cmd) + + +def internal_change_variable(dbg, seq, thread_id, frame_id, scope, attr, value): + ''' Changes the value of a variable ''' + try: + frame = dbg.find_frame(thread_id, frame_id) + if frame is not None: + result = pydevd_vars.change_attr_expression(frame, attr, value, dbg) + else: + result = None + xml = "" + xml += pydevd_xml.var_to_xml(result, "") + xml += "" + cmd = dbg.cmd_factory.make_variable_changed_message(seq, xml) + dbg.writer.add_command(cmd) + except Exception: + cmd = dbg.cmd_factory.make_error_message(seq, "Error changing variable attr:%s expression:%s traceback:%s" % (attr, value, get_exception_traceback_str())) + dbg.writer.add_command(cmd) + + +def internal_change_variable_json(py_db, request): + ''' + The pydevd_vars.change_attr_expression(thread_id, frame_id, attr, value, dbg) can only + deal with changing at a frame level, so, currently changing the contents of something + in a different scope is currently not supported. + + :param SetVariableRequest request: + ''' + # : :type arguments: SetVariableArguments + arguments = request.arguments + variables_reference = arguments.variablesReference + scope = None + if isinstance_checked(variables_reference, ScopeRequest): + scope = variables_reference + variables_reference = variables_reference.variable_reference + + fmt = arguments.format + if hasattr(fmt, 'to_dict'): + fmt = fmt.to_dict() + + try: + variable = py_db.suspended_frames_manager.get_variable(variables_reference) + except KeyError: + variable = None + + if variable is None: + _write_variable_response( + py_db, request, value='', success=False, message='Unable to find variable container to change: %s.' % (variables_reference,)) + return + + child_var = variable.change_variable(arguments.name, arguments.value, py_db, fmt=fmt) + + if child_var is None: + _write_variable_response( + py_db, request, value='', success=False, message='Unable to change: %s.' 
% (arguments.name,)) + return + + var_data = child_var.get_var_data(fmt=fmt) + body = SetVariableResponseBody( + value=var_data['value'], + type=var_data['type'], + variablesReference=var_data.get('variablesReference'), + namedVariables=var_data.get('namedVariables'), + indexedVariables=var_data.get('indexedVariables'), + ) + variables_response = pydevd_base_schema.build_response(request, kwargs={'body':body}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + + +def _write_variable_response(py_db, request, value, success, message): + body = SetVariableResponseBody('') + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body':body, + 'success': False, + 'message': message + }) + cmd = NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + py_db.writer.add_command(cmd) + + +@silence_warnings_decorator +def internal_get_frame(dbg, seq, thread_id, frame_id): + ''' Converts request into python variable ''' + try: + frame = dbg.find_frame(thread_id, frame_id) + if frame is not None: + hidden_ns = pydevconsole.get_ipython_hidden_vars() + xml = "" + xml += pydevd_xml.frame_vars_to_xml(frame.f_locals, hidden_ns) + del frame + xml += "" + cmd = dbg.cmd_factory.make_get_frame_message(seq, xml) + dbg.writer.add_command(cmd) + else: + # pydevd_vars.dump_frames(thread_id) + # don't print this error: frame not found: means that the client is not synchronized (but that's ok) + cmd = dbg.cmd_factory.make_error_message(seq, "Frame not found: %s from thread: %s" % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + except: + cmd = dbg.cmd_factory.make_error_message(seq, "Error resolving frame: %s from thread: %s" % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + + +def internal_get_smart_step_into_variants(dbg, seq, thread_id, frame_id, start_line, end_line, set_additional_thread_info): + try: + thread = pydevd_find_thread_by_id(thread_id) + frame = dbg.find_frame(thread_id, frame_id) + + if thread is None or frame is None: + cmd = dbg.cmd_factory.make_error_message(seq, "Frame not found: %s from thread: %s" % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + return + + if pydevd_bytecode_utils is None: + variants = [] + else: + variants = pydevd_bytecode_utils.calculate_smart_step_into_variants(frame, int(start_line), int(end_line)) + + info = set_additional_thread_info(thread) + + # Store the last request (may be used afterwards when stepping). + info.pydev_smart_step_into_variants = tuple(variants) + xml = "" + + for variant in variants: + if variant.children_variants: + for child_variant in variant.children_variants: + # If there are child variants, the current one is just an intermediary, so, + # just create variants for the child (notifying properly about the parent too). + xml += '' % ( + quote(child_variant.name), + str(child_variant.is_visited).lower(), + child_variant.line, + variant.offset, + child_variant.offset, + child_variant.call_order, + ) + else: + xml += '' % ( + quote(variant.name), + str(variant.is_visited).lower(), + variant.line, + variant.offset, + variant.call_order, + ) + + xml += "" + cmd = NetCommand(CMD_GET_SMART_STEP_INTO_VARIANTS, seq, xml) + dbg.writer.add_command(cmd) + except: + # Error is expected (if `dis` module cannot be used -- i.e.: Jython). 
+ pydev_log.exception('Error calculating Smart Step Into Variants.') + cmd = dbg.cmd_factory.make_error_message( + seq, "Error getting smart step into variants for frame: %s from thread: %s" + % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + + +def internal_get_step_in_targets_json(dbg, seq, thread_id, frame_id, request, set_additional_thread_info): + try: + thread = pydevd_find_thread_by_id(thread_id) + frame = dbg.find_frame(thread_id, frame_id) + + if thread is None or frame is None: + body = StepInTargetsResponseBody([]) + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': 'Thread to get step in targets seems to have resumed already.' + }) + cmd = NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + dbg.writer.add_command(cmd) + return + + start_line = 0 + end_line = 99999999 + if pydevd_bytecode_utils is None: + variants = [] + else: + variants = pydevd_bytecode_utils.calculate_smart_step_into_variants(frame, start_line, end_line) + + info = set_additional_thread_info(thread) + targets = [] + counter = itertools.count(0) + target_id_to_variant = {} + for variant in variants: + if not variant.is_visited: + if variant.children_variants: + for child_variant in variant.children_variants: + target_id = next(counter) + + if child_variant.call_order > 1: + targets.append(StepInTarget(id=target_id, label='%s (call %s)' % (child_variant.name, child_variant.call_order),)) + else: + targets.append(StepInTarget(id=target_id, label=child_variant.name)) + target_id_to_variant[target_id] = child_variant + + if len(targets) >= 15: # Show at most 15 targets. + break + else: + target_id = next(counter) + if variant.call_order > 1: + targets.append(StepInTarget(id=target_id, label='%s (call %s)' % (variant.name, variant.call_order),)) + else: + targets.append(StepInTarget(id=target_id, label=variant.name)) + target_id_to_variant[target_id] = variant + + if len(targets) >= 15: # Show at most 15 targets. + break + + # Store the last request (may be used afterwards when stepping). + info.pydev_smart_step_into_variants = tuple(variants) + info.target_id_to_smart_step_into_variant = target_id_to_variant + + body = StepInTargetsResponseBody(targets=targets) + response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + cmd = NetCommand(CMD_RETURN, 0, response, is_json=True) + dbg.writer.add_command(cmd) + except Exception as e: + # Error is expected (if `dis` module cannot be used -- i.e.: Jython). + pydev_log.exception('Error calculating Smart Step Into Variants.') + body = StepInTargetsResponseBody([]) + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': str(e) + }) + cmd = NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + dbg.writer.add_command(cmd) + + +def internal_get_next_statement_targets(dbg, seq, thread_id, frame_id): + ''' gets the valid line numbers for use with set next statement ''' + try: + frame = dbg.find_frame(thread_id, frame_id) + if frame is not None: + code = frame.f_code + xml = "" + try: + linestarts = dis.findlinestarts(code) + except: + # i.e.: jython doesn't provide co_lnotab, so, we can only keep at the current line. 
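The valid targets reported here are simply the lines that have a bytecode line start, which `dis.findlinestarts` exposes directly; for example, with `_sample` as a placeholder function:

    import dis

    def _sample(x):
        y = x * 2
        if y > 2:
            y -= 1
        return y

    targets = sorted({line for _, line in dis.findlinestarts(_sample.__code__)
                      if line is not None})
    print(targets)  # line numbers usable as "set next statement" targets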
+ xml += "%d" % (frame.f_lineno,) + else: + for _, line in linestarts: + xml += "%d" % (line,) + del frame + xml += "" + cmd = dbg.cmd_factory.make_get_next_statement_targets_message(seq, xml) + dbg.writer.add_command(cmd) + else: + cmd = dbg.cmd_factory.make_error_message(seq, "Frame not found: %s from thread: %s" % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + except: + cmd = dbg.cmd_factory.make_error_message(seq, "Error resolving frame: %s from thread: %s" % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + + +def _evaluate_response(py_db, request, result, error_message=''): + is_error = isinstance(result, ExceptionOnEvaluate) + if is_error: + result = result.result + if not error_message: + body = pydevd_schema.EvaluateResponseBody(result=result, variablesReference=0) + variables_response = pydevd_base_schema.build_response(request, kwargs={'body':body}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + else: + body = pydevd_schema.EvaluateResponseBody(result=result, variablesReference=0) + variables_response = pydevd_base_schema.build_response(request, kwargs={ + 'body':body, 'success':False, 'message': error_message}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + + +_global_frame = None + + +def internal_evaluate_expression_json(py_db, request, thread_id): + ''' + :param EvaluateRequest request: + ''' + global _global_frame + # : :type arguments: EvaluateArguments + + arguments = request.arguments + expression = arguments.expression + frame_id = arguments.frameId + context = arguments.context + fmt = arguments.format + if hasattr(fmt, 'to_dict'): + fmt = fmt.to_dict() + + ctx = NULL + if context == 'repl': + if not py_db.is_output_redirected: + ctx = pydevd_io.redirect_stream_to_pydb_io_messages_context() + else: + # If we're not in a repl (watch, hover, ...) don't show warnings. + ctx = filter_all_warnings() + + with ctx: + try_exec = False + if frame_id is None: + if _global_frame is None: + # Lazily create a frame to be used for evaluation with no frame id. + + def __create_frame(): + yield sys._getframe() + + _global_frame = next(__create_frame()) + + frame = _global_frame + try_exec = True # Always exec in this case + eval_result = None + else: + frame = py_db.find_frame(thread_id, frame_id) + + eval_result = pydevd_vars.evaluate_expression(py_db, frame, expression, is_exec=False) + is_error = isinstance_checked(eval_result, ExceptionOnEvaluate) + if is_error: + if context == 'hover': # In a hover it doesn't make sense to do an exec. + _evaluate_response(py_db, request, result='', error_message='Exception occurred during evaluation.') + return + elif context == 'watch': + # If it's a watch, don't show it as an exception object, rather, format + # it and show it as a string (with success=False). + msg = '%s: %s' % ( + eval_result.result.__class__.__name__, eval_result.result,) + _evaluate_response(py_db, request, result=msg, error_message=msg) + return + else: + # We only try the exec if the failure we had was due to not being able + # to evaluate the expression. + try: + pydevd_vars.compile_as_eval(expression) + except Exception: + try_exec = context == 'repl' + else: + try_exec = False + if context == 'repl': + # In the repl we should show the exception to the user. 
+ _evaluate_response_return_exception(py_db, request, eval_result.etype, eval_result.result, eval_result.tb) + return + + if try_exec: + try: + pydevd_vars.evaluate_expression(py_db, frame, expression, is_exec=True) + except (Exception, KeyboardInterrupt): + _evaluate_response_return_exception(py_db, request, *sys.exc_info()) + return + # No result on exec. + _evaluate_response(py_db, request, result='') + return + + # Ok, we have the result (could be an error), let's put it into the saved variables. + frame_tracker = py_db.suspended_frames_manager.get_frame_tracker(thread_id) + if frame_tracker is None: + # This is not really expected. + _evaluate_response(py_db, request, result='', error_message='Thread id: %s is not current thread id.' % (thread_id,)) + return + + safe_repr_custom_attrs = {} + if context == 'clipboard': + safe_repr_custom_attrs = dict( + maxstring_outer=2 ** 64, + maxstring_inner=2 ** 64, + maxother_outer=2 ** 64, + maxother_inner=2 ** 64, + ) + + if context == 'repl' and eval_result is None: + # We don't want "None" to appear when typing in the repl. + body = pydevd_schema.EvaluateResponseBody( + result=None, + variablesReference=0, + ) + + else: + variable = frame_tracker.obtain_as_variable(expression, eval_result, frame=frame) + var_data = variable.get_var_data(fmt=fmt, **safe_repr_custom_attrs) + + body = pydevd_schema.EvaluateResponseBody( + result=var_data['value'], + variablesReference=var_data.get('variablesReference', 0), + type=var_data.get('type'), + presentationHint=var_data.get('presentationHint'), + namedVariables=var_data.get('namedVariables'), + indexedVariables=var_data.get('indexedVariables'), + ) + variables_response = pydevd_base_schema.build_response(request, kwargs={'body':body}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + + +def _evaluate_response_return_exception(py_db, request, exc_type, exc, initial_tb): + try: + tb = initial_tb + + # Show the traceback without pydevd frames. + temp_tb = tb + while temp_tb: + if py_db.get_file_type(temp_tb.tb_frame) == PYDEV_FILE: + tb = temp_tb.tb_next + temp_tb = temp_tb.tb_next + + if tb is None: + tb = initial_tb + err = ''.join(traceback.format_exception(exc_type, exc, tb)) + + # Make sure we don't keep references to them. + exc = None + exc_type = None + tb = None + temp_tb = None + initial_tb = None + except: + err = '' + pydev_log.exception(err) + + # Currently there is an issue in VSC where returning success=false for an + # eval request, in repl context, VSC does not show the error response in + # the debug console. So return the error message in result as well. 
+ _evaluate_response(py_db, request, result=err, error_message=err) + + +@silence_warnings_decorator +def internal_evaluate_expression(dbg, seq, thread_id, frame_id, expression, is_exec, trim_if_too_big, attr_to_set_result): + ''' gets the value of a variable ''' + try: + frame = dbg.find_frame(thread_id, frame_id) + if frame is not None: + result = pydevd_vars.evaluate_expression(dbg, frame, expression, is_exec) + if attr_to_set_result != "": + pydevd_vars.change_attr_expression(frame, attr_to_set_result, expression, dbg, result) + else: + result = None + + xml = "" + xml += pydevd_xml.var_to_xml(result, expression, trim_if_too_big) + xml += "" + cmd = dbg.cmd_factory.make_evaluate_expression_message(seq, xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(seq, "Error evaluating expression " + exc) + dbg.writer.add_command(cmd) + + +def _set_expression_response(py_db, request, result, error_message): + body = pydevd_schema.SetExpressionResponseBody(result='', variablesReference=0) + variables_response = pydevd_base_schema.build_response(request, kwargs={ + 'body':body, 'success':False, 'message': error_message}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + + +def internal_set_expression_json(py_db, request, thread_id): + # : :type arguments: SetExpressionArguments + + arguments = request.arguments + expression = arguments.expression + frame_id = arguments.frameId + value = arguments.value + fmt = arguments.format + if hasattr(fmt, 'to_dict'): + fmt = fmt.to_dict() + + frame = py_db.find_frame(thread_id, frame_id) + exec_code = '%s = (%s)' % (expression, value) + result = pydevd_vars.evaluate_expression(py_db, frame, exec_code, is_exec=True) + is_error = isinstance(result, ExceptionOnEvaluate) + + if is_error: + _set_expression_response(py_db, request, result, error_message='Error executing: %s' % (exec_code,)) + return + + # Ok, we have the result (could be an error), let's put it into the saved variables. + frame_tracker = py_db.suspended_frames_manager.get_frame_tracker(thread_id) + if frame_tracker is None: + # This is not really expected. + _set_expression_response(py_db, request, result, error_message='Thread id: %s is not current thread id.' % (thread_id,)) + return + + # Now that the exec is done, get the actual value changed to return. + result = pydevd_vars.evaluate_expression(py_db, frame, expression, is_exec=False) + variable = frame_tracker.obtain_as_variable(expression, result, frame=frame) + var_data = variable.get_var_data(fmt=fmt) + + body = pydevd_schema.SetExpressionResponseBody( + value=var_data['value'], + variablesReference=var_data.get('variablesReference', 0), + type=var_data.get('type'), + presentationHint=var_data.get('presentationHint'), + namedVariables=var_data.get('namedVariables'), + indexedVariables=var_data.get('indexedVariables'), + ) + variables_response = pydevd_base_schema.build_response(request, kwargs={'body':body}) + py_db.writer.add_command(NetCommand(CMD_RETURN, 0, variables_response, is_json=True)) + + +def internal_get_completions(dbg, seq, thread_id, frame_id, act_tok, line=-1, column=-1): + ''' + Note that if the column is >= 0, the act_tok is considered text and the actual + activation token/qualifier is computed in this command. 
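Completion generation itself is delegated to `_pydev_completer.generate_completions` below; the stdlib `rlcompleter` gives a rough stand-in for trying the idea against a plain namespace (output order and contents may vary slightly by Python version):

    import rlcompleter

    namespace = {'total_count': 3, 'total_sum': 10.5}
    completer = rlcompleter.Completer(namespace)

    matches = []
    state = 0
    while True:
        match = completer.complete('total_', state)
        if match is None:
            break
        matches.append(match)
        state += 1
    print(matches)  # e.g. ['total_count', 'total_sum']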
+ ''' + try: + remove_path = None + try: + qualifier = u'' + if column >= 0: + token_and_qualifier = extract_token_and_qualifier(act_tok, line, column) + act_tok = token_and_qualifier[0] + if act_tok: + act_tok += u'.' + qualifier = token_and_qualifier[1] + + frame = dbg.find_frame(thread_id, frame_id) + if frame is not None: + completions = _pydev_completer.generate_completions(frame, act_tok) + + # Note that qualifier and start are only actually valid for the + # Debug Adapter Protocol (for the line-based protocol, the IDE + # is required to filter the completions returned). + cmd = dbg.cmd_factory.make_get_completions_message( + seq, completions, qualifier, start=column - len(qualifier)) + dbg.writer.add_command(cmd) + else: + cmd = dbg.cmd_factory.make_error_message(seq, "internal_get_completions: Frame not found: %s from thread: %s" % (frame_id, thread_id)) + dbg.writer.add_command(cmd) + + finally: + if remove_path is not None: + sys.path.remove(remove_path) + + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(seq, "Error evaluating expression " + exc) + dbg.writer.add_command(cmd) + + +def internal_get_description(dbg, seq, thread_id, frame_id, expression): + ''' Fetch the variable description stub from the debug console + ''' + try: + frame = dbg.find_frame(thread_id, frame_id) + description = pydevd_console.get_description(frame, thread_id, frame_id, expression) + description = pydevd_xml.make_valid_xml_value(quote(description, '/>_= \t')) + description_xml = '' % description + cmd = dbg.cmd_factory.make_get_description_message(seq, description_xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(seq, "Error in fetching description" + exc) + dbg.writer.add_command(cmd) + + +def build_exception_info_response(dbg, thread_id, request_seq, set_additional_thread_info, iter_visible_frames_info, max_frames): + ''' + :return ExceptionInfoResponse + ''' + thread = pydevd_find_thread_by_id(thread_id) + additional_info = set_additional_thread_info(thread) + topmost_frame = additional_info.get_topmost_frame(thread) + + current_paused_frame_name = '' + + source_path = '' # This is an extra bit of data used by Visual Studio + stack_str_lst = [] + name = None + description = None + + if topmost_frame is not None: + try: + try: + frames_list = dbg.suspended_frames_manager.get_frames_list(thread_id) + memo = set() + while frames_list is not None and len(frames_list): + frames = [] + + frame = None + + if not name: + exc_type = frames_list.exc_type + if exc_type is not None: + try: + name = exc_type.__qualname__ + except: + try: + name = exc_type.__name__ + except: + try: + name = str(exc_type) + except: + pass + + if not description: + exc_desc = frames_list.exc_desc + if exc_desc is not None: + try: + description = str(exc_desc) + except: + pass + + for frame_id, frame, method_name, original_filename, filename_in_utf8, lineno, _applied_mapping, show_as_current_frame in \ + iter_visible_frames_info(dbg, frames_list): + + line_text = linecache.getline(original_filename, lineno) + + # Never filter out plugin frames! 
+ if not getattr(frame, 'IS_PLUGIN_FRAME', False): + if dbg.is_files_filter_enabled and dbg.apply_files_filter(frame, original_filename, False): + continue + + if show_as_current_frame: + current_paused_frame_name = method_name + method_name += ' (Current frame)' + frames.append((filename_in_utf8, lineno, method_name, line_text)) + + if not source_path and frames: + source_path = frames[0][0] + + stack_str = ''.join(traceback.format_list(frames[-max_frames:])) + stack_str += frames_list.exc_context_msg + stack_str_lst.append(stack_str) + + frames_list = create_frames_list_from_exception_cause( + frames_list.trace_obj, None, frames_list.exc_type, frames_list.exc_desc, memo) + if frames_list is None or not frames_list: + break + + except: + pydev_log.exception('Error on build_exception_info_response.') + finally: + topmost_frame = None + full_stack_str = ''.join(reversed(stack_str_lst)) + + if not name: + name = 'exception: type unknown' + if not description: + description = 'exception: no description' + + if current_paused_frame_name: + name += ' (note: full exception trace is shown but execution is paused at: %s)' % (current_paused_frame_name,) + + if thread.stop_reason == CMD_STEP_CAUGHT_EXCEPTION: + break_mode = pydevd_schema.ExceptionBreakMode.ALWAYS + else: + break_mode = pydevd_schema.ExceptionBreakMode.UNHANDLED + + response = pydevd_schema.ExceptionInfoResponse( + request_seq=request_seq, + success=True, + command='exceptionInfo', + body=pydevd_schema.ExceptionInfoResponseBody( + exceptionId=name, + description=description, + breakMode=break_mode, + details=pydevd_schema.ExceptionDetails( + message=description, + typeName=name, + stackTrace=full_stack_str, + source=source_path, + # Note: ExceptionDetails actually accepts an 'innerException', but + # when passing it, VSCode is not showing the stack trace at all. 
+ ) + ) + ) + return response + + +def internal_get_exception_details_json(dbg, request, thread_id, max_frames, set_additional_thread_info=None, iter_visible_frames_info=None): + ''' Fetch exception details + ''' + try: + response = build_exception_info_response(dbg, thread_id, request.seq, set_additional_thread_info, iter_visible_frames_info, max_frames) + except: + exc = get_exception_traceback_str() + response = pydevd_base_schema.build_response(request, kwargs={ + 'success': False, + 'message': exc, + 'body':{} + }) + dbg.writer.add_command(NetCommand(CMD_RETURN, 0, response, is_json=True)) + + +class InternalGetBreakpointException(InternalThreadCommand): + ''' Send details of exception raised while evaluating conditional breakpoint ''' + + def __init__(self, thread_id, exc_type, stacktrace): + self.sequence = 0 + self.thread_id = thread_id + self.stacktrace = stacktrace + self.exc_type = exc_type + + def do_it(self, dbg): + try: + callstack = "" + + makeValid = pydevd_xml.make_valid_xml_value + + for filename, line, methodname, methodobj in self.stacktrace: + if not filesystem_encoding_is_utf8 and hasattr(filename, "decode"): + # filename is a byte string encoded using the file system encoding + # convert it to utf8 + filename = filename.decode(file_system_encoding).encode("utf-8") + + callstack += '' \ + % (self.thread_id, makeValid(filename), line, makeValid(methodname), makeValid(methodobj)) + callstack += "" + + cmd = dbg.cmd_factory.make_send_breakpoint_exception_message(self.sequence, self.exc_type + "\t" + callstack) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error Sending Exception: " + exc) + dbg.writer.add_command(cmd) + + +class InternalSendCurrExceptionTrace(InternalThreadCommand): + ''' Send details of the exception that was caught and where we've broken in. + ''' + + def __init__(self, thread_id, arg, curr_frame_id): + ''' + :param arg: exception type, description, traceback object + ''' + self.sequence = 0 + self.thread_id = thread_id + self.curr_frame_id = curr_frame_id + self.arg = arg + + def do_it(self, dbg): + try: + cmd = dbg.cmd_factory.make_send_curr_exception_trace_message(dbg, self.sequence, self.thread_id, self.curr_frame_id, *self.arg) + del self.arg + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error Sending Current Exception Trace: " + exc) + dbg.writer.add_command(cmd) + + +class InternalSendCurrExceptionTraceProceeded(InternalThreadCommand): + ''' Send details of the exception that was caught and where we've broken in. 
+ ''' + + def __init__(self, thread_id): + self.sequence = 0 + self.thread_id = thread_id + + def do_it(self, dbg): + try: + cmd = dbg.cmd_factory.make_send_curr_exception_trace_proceeded_message(self.sequence, self.thread_id) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error Sending Current Exception Trace Proceeded: " + exc) + dbg.writer.add_command(cmd) + + +class InternalEvaluateConsoleExpression(InternalThreadCommand): + ''' Execute the given command in the debug console ''' + + def __init__(self, seq, thread_id, frame_id, line, buffer_output=True): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.line = line + self.buffer_output = buffer_output + + def do_it(self, dbg): + ''' Create an XML for console output, error and more (true/false) + + + + true/false + + ''' + try: + frame = dbg.find_frame(self.thread_id, self.frame_id) + if frame is not None: + console_message = pydevd_console.execute_console_command( + frame, self.thread_id, self.frame_id, self.line, self.buffer_output) + + cmd = dbg.cmd_factory.make_send_console_message(self.sequence, console_message.to_xml()) + else: + from _pydevd_bundle.pydevd_console import ConsoleMessage + console_message = ConsoleMessage() + console_message.add_console_message( + pydevd_console.CONSOLE_ERROR, + "Select the valid frame in the debug view (thread: %s, frame: %s invalid)" % (self.thread_id, self.frame_id), + ) + cmd = dbg.cmd_factory.make_error_message(self.sequence, console_message.to_xml()) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating expression " + exc) + dbg.writer.add_command(cmd) + + +class InternalRunCustomOperation(InternalThreadCommand): + ''' Run a custom command on an expression + ''' + + def __init__(self, seq, thread_id, frame_id, scope, attrs, style, encoded_code_or_file, fnname): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.scope = scope + self.attrs = attrs + self.style = style + self.code_or_file = unquote_plus(encoded_code_or_file) + self.fnname = fnname + + def do_it(self, dbg): + try: + res = pydevd_vars.custom_operation(dbg, self.thread_id, self.frame_id, self.scope, self.attrs, + self.style, self.code_or_file, self.fnname) + resEncoded = quote_plus(res) + cmd = dbg.cmd_factory.make_custom_operation_message(self.sequence, resEncoded) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error in running custom operation" + exc) + dbg.writer.add_command(cmd) + + +class InternalConsoleGetCompletions(InternalThreadCommand): + ''' Fetch the completions in the debug console + ''' + + def __init__(self, seq, thread_id, frame_id, act_tok): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.act_tok = act_tok + + def do_it(self, dbg): + ''' Get completions and write back to the client + ''' + try: + frame = dbg.find_frame(self.thread_id, self.frame_id) + completions_xml = pydevd_console.get_completions(frame, self.act_tok) + cmd = dbg.cmd_factory.make_send_console_message(self.sequence, completions_xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error in fetching completions" + exc) + dbg.writer.add_command(cmd) + + +class 
InternalConsoleExec(InternalThreadCommand): + ''' gets the value of a variable ''' + + def __init__(self, seq, thread_id, frame_id, expression): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.expression = expression + + def do_it(self, dbg): + ''' Converts request into python variable ''' + try: + try: + # don't trace new threads created by console command + disable_trace_thread_modules() + + result = pydevconsole.console_exec(self.thread_id, self.frame_id, self.expression, dbg) + xml = "" + xml += pydevd_xml.var_to_xml(result, "") + xml += "" + cmd = dbg.cmd_factory.make_evaluate_expression_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating console expression " + exc) + dbg.writer.add_command(cmd) + finally: + enable_trace_thread_modules() + + sys.stderr.flush() + sys.stdout.flush() + + +class InternalLoadFullValue(InternalThreadCommand): + ''' + Loads values asynchronously + ''' + + def __init__(self, seq, thread_id, frame_id, vars): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.vars = vars + + @silence_warnings_decorator + def do_it(self, dbg): + '''Starts a thread that will load values asynchronously''' + try: + var_objects = [] + for variable in self.vars: + variable = variable.strip() + if len(variable) > 0: + if '\t' in variable: # there are attributes beyond scope + scope, attrs = variable.split('\t', 1) + name = attrs[0] + else: + scope, attrs = (variable, None) + name = scope + var_obj = pydevd_vars.getVariable(dbg, self.thread_id, self.frame_id, scope, attrs) + var_objects.append((var_obj, name)) + + t = GetValueAsyncThreadDebug(dbg, dbg, self.sequence, var_objects) + t.start() + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating variable %s " % exc) + dbg.writer.add_command(cmd) + + +class AbstractGetValueAsyncThread(PyDBDaemonThread): + ''' + Abstract class for a thread, which evaluates values for async variables + ''' + + def __init__(self, py_db, frame_accessor, seq, var_objects): + PyDBDaemonThread.__init__(self, py_db) + self.frame_accessor = frame_accessor + self.seq = seq + self.var_objs = var_objects + self.cancel_event = threading.Event() + + def send_result(self, xml): + raise NotImplementedError() + + @overrides(PyDBDaemonThread._on_run) + def _on_run(self): + start = time.time() + xml = StringIO() + xml.write("") + for (var_obj, name) in self.var_objs: + current_time = time.time() + if current_time - start > ASYNC_EVAL_TIMEOUT_SEC or self.cancel_event.is_set(): + break + xml.write(pydevd_xml.var_to_xml(var_obj, name, evaluate_full_value=True)) + xml.write("") + self.send_result(xml) + xml.close() + + +class GetValueAsyncThreadDebug(AbstractGetValueAsyncThread): + ''' + A thread for evaluation async values, which returns result for debugger + Create message and send it via writer thread + ''' + + def send_result(self, xml): + if self.frame_accessor is not None: + cmd = self.frame_accessor.cmd_factory.make_load_full_value_message(self.seq, xml.getvalue()) + self.frame_accessor.writer.add_command(cmd) + + +class GetValueAsyncThreadConsole(AbstractGetValueAsyncThread): + ''' + A thread for evaluation async values, which returns result for Console + Send result directly to Console's server + ''' + + def send_result(self, xml): 
+ if self.frame_accessor is not None: + self.frame_accessor.ReturnFullValue(self.seq, xml.getvalue()) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm_constants.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm_constants.py new file mode 120000 index 0000000..3fa9451 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm_constants.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4b/66/a2/c8f4271feb9474fd618ad5763f676a33d1bcb77231d8259ddf13c5d3ce \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_command_line_handling.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_command_line_handling.py new file mode 120000 index 0000000..d790564 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_command_line_handling.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c2/d6/85/c7d8f6f24fb382115cc5a576268d27605edea82e55a41609e5b2b4550e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3a260bb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_concurrency_logger.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_concurrency_logger.cpython-38.pyc new file mode 100644 index 0000000..6e7fb55 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_concurrency_logger.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_thread_wrappers.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_thread_wrappers.cpython-38.pyc new file mode 100644 index 0000000..e96b894 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__pycache__/pydevd_thread_wrappers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_concurrency_logger.py 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_concurrency_logger.py new file mode 100644 index 0000000..95fc054 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_concurrency_logger.py @@ -0,0 +1,346 @@ +import time + +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder +from _pydevd_bundle.pydevd_constants import get_thread_id +from _pydevd_bundle.pydevd_net_command import NetCommand +from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_thread_wrappers import ObjectWrapper, wrap_attr +import pydevd_file_utils +from _pydev_bundle import pydev_log +import sys + +file_system_encoding = getfilesystemencoding() + +from urllib.parse import quote + +threadingCurrentThread = threading.current_thread + +DONT_TRACE_THREADING = ['threading.py', 'pydevd.py'] +INNER_METHODS = ['_stop'] +INNER_FILES = ['threading.py'] +THREAD_METHODS = ['start', '_stop', 'join'] +LOCK_METHODS = ['__init__', 'acquire', 'release', '__enter__', '__exit__'] +QUEUE_METHODS = ['put', 'get'] + +# return time since epoch in milliseconds +cur_time = lambda: int(round(time.time() * 1000000)) + + +def get_text_list_for_frame(frame): + # partial copy-paste from make_thread_suspend_str + curFrame = frame + cmdTextList = [] + try: + while curFrame: + # print cmdText + myId = str(id(curFrame)) + # print "id is ", myId + + if curFrame.f_code is None: + break # Iron Python sometimes does not have it! + + myName = curFrame.f_code.co_name # method name (if in method) or ? if global + if myName is None: + break # Iron Python sometimes does not have it! 
+ + # print "name is ", myName + + absolute_filename = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(curFrame)[0] + + my_file, _applied_mapping = pydevd_file_utils.map_file_to_client(absolute_filename) + + # print "file is ", my_file + # my_file = inspect.getsourcefile(curFrame) or inspect.getfile(frame) + + myLine = str(curFrame.f_lineno) + # print "line is ", myLine + + # the variables are all gotten 'on-demand' + # variables = pydevd_xml.frame_vars_to_xml(curFrame.f_locals) + + variables = '' + cmdTextList.append('' % (quote(my_file, '/>_= \t'), myLine)) + cmdTextList.append(variables) + cmdTextList.append("") + curFrame = curFrame.f_back + except: + pydev_log.exception() + + return cmdTextList + + +def send_concurrency_message(event_class, time, name, thread_id, type, event, file, line, frame, lock_id=0, parent=None): + dbg = GlobalDebuggerHolder.global_dbg + if dbg is None: + return + cmdTextList = [''] + + cmdTextList.append('<' + event_class) + cmdTextList.append(' time="%s"' % pydevd_xml.make_valid_xml_value(str(time))) + cmdTextList.append(' name="%s"' % pydevd_xml.make_valid_xml_value(name)) + cmdTextList.append(' thread_id="%s"' % pydevd_xml.make_valid_xml_value(thread_id)) + cmdTextList.append(' type="%s"' % pydevd_xml.make_valid_xml_value(type)) + if type == "lock": + cmdTextList.append(' lock_id="%s"' % pydevd_xml.make_valid_xml_value(str(lock_id))) + if parent is not None: + cmdTextList.append(' parent="%s"' % pydevd_xml.make_valid_xml_value(parent)) + cmdTextList.append(' event="%s"' % pydevd_xml.make_valid_xml_value(event)) + cmdTextList.append(' file="%s"' % pydevd_xml.make_valid_xml_value(file)) + cmdTextList.append(' line="%s"' % pydevd_xml.make_valid_xml_value(str(line))) + cmdTextList.append('>') + + cmdTextList += get_text_list_for_frame(frame) + cmdTextList.append('') + + text = ''.join(cmdTextList) + if dbg.writer is not None: + dbg.writer.add_command(NetCommand(145, 0, text)) + + +def log_new_thread(global_debugger, t): + event_time = cur_time() - global_debugger.thread_analyser.start_time + send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "thread", + "start", "code_name", 0, None, parent=get_thread_id(t)) + + +class ThreadingLogger: + + def __init__(self): + self.start_time = cur_time() + + def set_start_time(self, time): + self.start_time = time + + def log_event(self, frame): + write_log = False + self_obj = None + if "self" in frame.f_locals: + self_obj = frame.f_locals["self"] + if isinstance(self_obj, threading.Thread) or self_obj.__class__ == ObjectWrapper: + write_log = True + if hasattr(frame, "f_back") and frame.f_back is not None: + back = frame.f_back + if hasattr(back, "f_back") and back.f_back is not None: + back = back.f_back + if "self" in back.f_locals: + if isinstance(back.f_locals["self"], threading.Thread): + write_log = True + try: + if write_log: + t = threadingCurrentThread() + back = frame.f_back + if not back: + return + name, _, back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back) + event_time = cur_time() - self.start_time + method_name = frame.f_code.co_name + + if isinstance(self_obj, threading.Thread): + if not hasattr(self_obj, "_pydev_run_patched"): + wrap_attr(self_obj, "run") + if (method_name in THREAD_METHODS) and (back_base not in DONT_TRACE_THREADING or \ + (method_name in INNER_METHODS and back_base in INNER_FILES)): + thread_id = get_thread_id(self_obj) + name = self_obj.getName() + real_method = frame.f_code.co_name + parent = None + if real_method == 
"_stop": + if back_base in INNER_FILES and \ + back.f_code.co_name == "_wait_for_tstate_lock": + back = back.f_back.f_back + real_method = "stop" + if hasattr(self_obj, "_pydev_join_called"): + parent = get_thread_id(t) + elif real_method == "join": + # join called in the current thread, not in self object + if not self_obj.is_alive(): + return + thread_id = get_thread_id(t) + name = t.name + self_obj._pydev_join_called = True + + if real_method == "start": + parent = get_thread_id(t) + send_concurrency_message("threading_event", event_time, name, thread_id, "thread", + real_method, back.f_code.co_filename, back.f_lineno, back, parent=parent) + # print(event_time, self_obj.getName(), thread_id, "thread", + # real_method, back.f_code.co_filename, back.f_lineno) + + if method_name == "pydev_after_run_call": + if hasattr(frame, "f_back") and frame.f_back is not None: + back = frame.f_back + if hasattr(back, "f_back") and back.f_back is not None: + back = back.f_back + if "self" in back.f_locals: + if isinstance(back.f_locals["self"], threading.Thread): + my_self_obj = frame.f_back.f_back.f_locals["self"] + my_back = frame.f_back.f_back + my_thread_id = get_thread_id(my_self_obj) + send_massage = True + if hasattr(my_self_obj, "_pydev_join_called"): + send_massage = False + # we can't detect stop after join in Python 2 yet + if send_massage: + send_concurrency_message("threading_event", event_time, "Thread", my_thread_id, "thread", + "stop", my_back.f_code.co_filename, my_back.f_lineno, my_back, parent=None) + + if self_obj.__class__ == ObjectWrapper: + if back_base in DONT_TRACE_THREADING: + # do not trace methods called from threading + return + back_back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back.f_back)[2] + back = back.f_back + if back_back_base in DONT_TRACE_THREADING: + # back_back_base is the file, where the method was called froms + return + if method_name == "__init__": + send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "lock", + method_name, back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(frame.f_locals["self"]))) + if "attr" in frame.f_locals and \ + (frame.f_locals["attr"] in LOCK_METHODS or + frame.f_locals["attr"] in QUEUE_METHODS): + real_method = frame.f_locals["attr"] + if method_name == "call_begin": + real_method += "_begin" + elif method_name == "call_end": + real_method += "_end" + else: + return + if real_method == "release_end": + # do not log release end. 
Maybe use it later + return + send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "lock", + real_method, back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(self_obj))) + + if real_method in ("put_end", "get_end"): + # fake release for queue, cause we don't call it directly + send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "lock", + "release", back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(self_obj))) + # print(event_time, t.name, get_thread_id(t), "lock", + # real_method, back.f_code.co_filename, back.f_lineno) + + except Exception: + pydev_log.exception() + + +class NameManager: + + def __init__(self, name_prefix): + self.tasks = {} + self.last = 0 + self.prefix = name_prefix + + def get(self, id): + if id not in self.tasks: + self.last += 1 + self.tasks[id] = self.prefix + "-" + str(self.last) + return self.tasks[id] + + +class AsyncioLogger: + + def __init__(self): + self.task_mgr = NameManager("Task") + self.coro_mgr = NameManager("Coro") + self.start_time = cur_time() + + def get_task_id(self, frame): + asyncio = sys.modules.get('asyncio') + if asyncio is None: + # If asyncio was not imported, there's nothing to be done + # (also fixes issue where multiprocessing is imported due + # to asyncio). + return None + while frame is not None: + if "self" in frame.f_locals: + self_obj = frame.f_locals["self"] + if isinstance(self_obj, asyncio.Task): + method_name = frame.f_code.co_name + if method_name == "_step": + return id(self_obj) + frame = frame.f_back + return None + + def log_event(self, frame): + event_time = cur_time() - self.start_time + + # Debug loop iterations + # if isinstance(self_obj, asyncio.base_events.BaseEventLoop): + # if method_name == "_run_once": + # print("Loop iteration") + + if not hasattr(frame, "f_back") or frame.f_back is None: + return + + asyncio = sys.modules.get('asyncio') + if asyncio is None: + # If asyncio was not imported, there's nothing to be done + # (also fixes issue where multiprocessing is imported due + # to asyncio). 
+ return + + back = frame.f_back + + if "self" in frame.f_locals: + self_obj = frame.f_locals["self"] + if isinstance(self_obj, asyncio.Task): + method_name = frame.f_code.co_name + if method_name == "set_result": + task_id = id(self_obj) + task_name = self.task_mgr.get(str(task_id)) + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "thread", "stop", frame.f_code.co_filename, + frame.f_lineno, frame) + + method_name = back.f_code.co_name + if method_name == "__init__": + task_id = id(self_obj) + task_name = self.task_mgr.get(str(task_id)) + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "thread", "start", frame.f_code.co_filename, + frame.f_lineno, frame) + + method_name = frame.f_code.co_name + if isinstance(self_obj, asyncio.Lock): + if method_name in ("acquire", "release"): + task_id = self.get_task_id(frame) + task_name = self.task_mgr.get(str(task_id)) + + if method_name == "acquire": + if not self_obj._waiters and not self_obj.locked(): + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + method_name + "_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + if self_obj.locked(): + method_name += "_begin" + else: + method_name += "_end" + elif method_name == "release": + method_name += "_end" + + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + method_name, frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + + if isinstance(self_obj, asyncio.Queue): + if method_name in ("put", "get", "_put", "_get"): + task_id = self.get_task_id(frame) + task_name = self.task_mgr.get(str(task_id)) + + if method_name == "put": + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + elif method_name == "_put": + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_end", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + "release", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + elif method_name == "get": + back = frame.f_back + if back.f_code.co_name != "send": + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + else: + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_end", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock", + "release", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_thread_wrappers.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_thread_wrappers.py new file mode 100644 index 0000000..526bb0c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_thread_wrappers.py @@ -0,0 +1,83 @@ +from _pydev_bundle._pydev_saved_modules import threading + + +def wrapper(fun): + + def pydev_after_run_call(): + pass + + def inner(*args, **kwargs): + 
fun(*args, **kwargs) + pydev_after_run_call() + + return inner + + +def wrap_attr(obj, attr): + t_save_start = getattr(obj, attr) + setattr(obj, attr, wrapper(t_save_start)) + obj._pydev_run_patched = True + + +class ObjectWrapper(object): + + def __init__(self, obj): + self.wrapped_object = obj + try: + import functools + functools.update_wrapper(self, obj) + except: + pass + + def __getattr__(self, attr): + orig_attr = getattr(self.wrapped_object, attr) # .__getattribute__(attr) + if callable(orig_attr): + + def patched_attr(*args, **kwargs): + self.call_begin(attr) + result = orig_attr(*args, **kwargs) + self.call_end(attr) + if result == self.wrapped_object: + return self + return result + + return patched_attr + else: + return orig_attr + + def call_begin(self, attr): + pass + + def call_end(self, attr): + pass + + def __enter__(self): + self.call_begin("__enter__") + self.wrapped_object.__enter__() + self.call_end("__enter__") + + def __exit__(self, exc_type, exc_val, exc_tb): + self.call_begin("__exit__") + self.wrapped_object.__exit__(exc_type, exc_val, exc_tb) + + +def factory_wrapper(fun): + + def inner(*args, **kwargs): + obj = fun(*args, **kwargs) + return ObjectWrapper(obj) + + return inner + + +def wrap_threads(): + # TODO: add wrappers for thread and _thread + # import _thread as mod + # print("Thread imported") + # mod.start_new_thread = wrapper(mod.start_new_thread) + threading.Lock = factory_wrapper(threading.Lock) + threading.RLock = factory_wrapper(threading.RLock) + + # queue patching + import queue # @UnresolvedImport + queue.Queue = factory_wrapper(queue.Queue) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py new file mode 100644 index 0000000..925e010 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py @@ -0,0 +1,270 @@ +'''An helper file for the pydev debugger (REPL) console +''' +import sys +import traceback +from _pydevd_bundle.pydevconsole_code import InteractiveConsole, _EvalAwaitInNewEventLoop +from _pydev_bundle import _pydev_completer +from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn +from _pydev_bundle.pydev_imports import Exec +from _pydev_bundle.pydev_override import overrides +from _pydevd_bundle import pydevd_save_locals +from _pydevd_bundle.pydevd_io import IOBuf +from pydevd_tracing import get_exception_traceback_str +from _pydevd_bundle.pydevd_xml import make_valid_xml_value +import inspect +from _pydevd_bundle.pydevd_save_locals import update_globals_and_locals + +CONSOLE_OUTPUT = "output" +CONSOLE_ERROR = "error" + + +#======================================================================================================================= +# ConsoleMessage +#======================================================================================================================= +class ConsoleMessage: + """Console Messages + """ + + def __init__(self): + self.more = False + # List of tuple [('error', 'error_message'), ('message_list', 'output_message')] + self.console_messages = [] + + def add_console_message(self, message_type, message): + """add messages in the console_messages list + """ + for m in message.split("\n"): + if m.strip(): + self.console_messages.append((message_type, m)) + + def update_more(self, more): + """more is set to true if further input is required from the user + else more is set to false + 
""" + self.more = more + + def to_xml(self): + """Create an XML for console message_list, error and more (true/false) + + console message_list + console error + true/false + + """ + makeValid = make_valid_xml_value + + xml = '%s' % (self.more) + + for message_type, message in self.console_messages: + xml += '<%s message="%s">' % (message_type, makeValid(message), message_type) + + xml += '' + + return xml + + +#======================================================================================================================= +# _DebugConsoleStdIn +#======================================================================================================================= +class _DebugConsoleStdIn(BaseStdIn): + + @overrides(BaseStdIn.readline) + def readline(self, *args, **kwargs): + sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n') + return '\n' + + +#======================================================================================================================= +# DebugConsole +#======================================================================================================================= +class DebugConsole(InteractiveConsole, BaseInterpreterInterface): + """Wrapper around code.InteractiveConsole, in order to send + errors and outputs to the debug console + """ + + @overrides(BaseInterpreterInterface.create_std_in) + def create_std_in(self, *args, **kwargs): + try: + if not self.__buffer_output: + return sys.stdin + except: + pass + + return _DebugConsoleStdIn() # If buffered, raw_input is not supported in this console. + + @overrides(InteractiveConsole.push) + def push(self, line, frame, buffer_output=True): + """Change built-in stdout and stderr methods by the + new custom StdMessage. + execute the InteractiveConsole.push. + Change the stdout and stderr back be the original built-ins + + :param buffer_output: if False won't redirect the output. + + Return boolean (True if more input is required else False), + output_messages and input_messages + """ + self.__buffer_output = buffer_output + more = False + if buffer_output: + original_stdout = sys.stdout + original_stderr = sys.stderr + try: + try: + self.frame = frame + if buffer_output: + out = sys.stdout = IOBuf() + err = sys.stderr = IOBuf() + more = self.add_exec(line) + except Exception: + exc = get_exception_traceback_str() + if buffer_output: + err.buflist.append("Internal Error: %s" % (exc,)) + else: + sys.stderr.write("Internal Error: %s\n" % (exc,)) + finally: + # Remove frame references. + self.frame = None + frame = None + if buffer_output: + sys.stdout = original_stdout + sys.stderr = original_stderr + + if buffer_output: + return more, out.buflist, err.buflist + else: + return more, [], [] + + @overrides(BaseInterpreterInterface.do_add_exec) + def do_add_exec(self, line): + return InteractiveConsole.push(self, line) + + @overrides(InteractiveConsole.runcode) + def runcode(self, code): + """Execute a code object. + + When an exception occurs, self.showtraceback() is called to + display a traceback. All exceptions are caught except + SystemExit, which is reraised. + + A note about KeyboardInterrupt: this exception may occur + elsewhere in this code, and may not always be caught. The + caller should be prepared to deal with it. 
+ + """ + try: + updated_globals = self.get_namespace() + initial_globals = updated_globals.copy() + + updated_locals = None + + is_async = False + if hasattr(inspect, 'CO_COROUTINE'): + is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE + + if is_async: + t = _EvalAwaitInNewEventLoop(code, updated_globals, updated_locals) + t.start() + t.join() + + update_globals_and_locals(updated_globals, initial_globals, self.frame) + if t.exc: + raise t.exc[1].with_traceback(t.exc[2]) + + else: + try: + exec(code, updated_globals, updated_locals) + finally: + update_globals_and_locals(updated_globals, initial_globals, self.frame) + except SystemExit: + raise + except: + # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+ + # (showtraceback does it on python 3.5 onwards) + sys.excepthook = sys.__excepthook__ + try: + self.showtraceback() + finally: + sys.__excepthook__ = sys.excepthook + + def get_namespace(self): + dbg_namespace = {} + dbg_namespace.update(self.frame.f_globals) + dbg_namespace.update(self.frame.f_locals) # locals later because it has precedence over the actual globals + return dbg_namespace + + +#======================================================================================================================= +# InteractiveConsoleCache +#======================================================================================================================= +class InteractiveConsoleCache: + + thread_id = None + frame_id = None + interactive_console_instance = None + + +# Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions. +def get_interactive_console(thread_id, frame_id, frame, console_message): + """returns the global interactive console. + interactive console should have been initialized by this time + :rtype: DebugConsole + """ + if InteractiveConsoleCache.thread_id == thread_id and InteractiveConsoleCache.frame_id == frame_id: + return InteractiveConsoleCache.interactive_console_instance + + InteractiveConsoleCache.interactive_console_instance = DebugConsole() + InteractiveConsoleCache.thread_id = thread_id + InteractiveConsoleCache.frame_id = frame_id + + console_stacktrace = traceback.extract_stack(frame, limit=1) + if console_stacktrace: + current_context = console_stacktrace[0] # top entry from stacktrace + context_message = 'File "%s", line %s, in %s' % (current_context[0], current_context[1], current_context[2]) + console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,)) + return InteractiveConsoleCache.interactive_console_instance + + +def clear_interactive_console(): + InteractiveConsoleCache.thread_id = None + InteractiveConsoleCache.frame_id = None + InteractiveConsoleCache.interactive_console_instance = None + + +def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True): + """fetch an interactive console instance from the cache and + push the received command to the console. 
+ + create and return an instance of console_message + """ + console_message = ConsoleMessage() + + interpreter = get_interactive_console(thread_id, frame_id, frame, console_message) + more, output_messages, error_messages = interpreter.push(line, frame, buffer_output) + console_message.update_more(more) + + for message in output_messages: + console_message.add_console_message(CONSOLE_OUTPUT, message) + + for message in error_messages: + console_message.add_console_message(CONSOLE_ERROR, message) + + return console_message + + +def get_description(frame, thread_id, frame_id, expression): + console_message = ConsoleMessage() + interpreter = get_interactive_console(thread_id, frame_id, frame, console_message) + try: + interpreter.frame = frame + return interpreter.getDescription(expression) + finally: + interpreter.frame = None + + +def get_completions(frame, act_tok): + """ fetch all completions, create xml for the same + return the completions xml + """ + return _pydev_completer.generate_completions_as_xml(frame, act_tok) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_constants.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_constants.py new file mode 100644 index 0000000..1b36524 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_constants.py @@ -0,0 +1,792 @@ +''' +This module holds the constants used for specifying the states of the debugger. +''' +from __future__ import nested_scopes +import platform +import weakref +import struct +import warnings +import functools +from contextlib import contextmanager + +STATE_RUN = 1 +STATE_SUSPEND = 2 + +PYTHON_SUSPEND = 1 +DJANGO_SUSPEND = 2 +JINJA2_SUSPEND = 3 + +int_types = (int,) + +# types does not include a MethodWrapperType +try: + MethodWrapperType = type([].__str__) +except: + MethodWrapperType = None + +import sys # Note: the sys import must be here anyways (others depend on it) + +# Preload codecs to avoid imports to them later on which can potentially halt the debugger. +import codecs as _codecs +for _codec in ["ascii", "utf8", "utf-8", "latin1", "latin-1", "idna"]: + _codecs.lookup(_codec) + + +class DebugInfoHolder: + # we have to put it here because it can be set through the command line (so, the + # already imported references would not have it). + + # General information + DEBUG_TRACE_LEVEL = 0 # 0 = critical, 1 = info, 2 = debug, 3 = verbose + + # Flags to debug specific points of the code. + DEBUG_RECORD_SOCKET_READS = False + DEBUG_TRACE_BREAKPOINTS = -1 + + PYDEVD_DEBUG_FILE = None + + +# Any filename that starts with these strings is not traced nor shown to the user. +# In Python 3.7 " ..." can appear and should be ignored for the user. +# has special heuristics to know whether it should be traced or not (it's part of +# user code when it's the used in python -c and part of the library otherwise). + +# Any filename that starts with these strings is considered user (project) code. Note +# that files for which we have a source mapping are also considered as a part of the project. 
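The tuples defined next drive a simple prefix test on a frame's filename to decide whether it counts as project code or library code. A rough, self-contained sketch of that kind of check; the prefix values and function name here are illustrative, not pydevd's exact definitions:

    # Illustrative prefixes: filenames synthesized by interactive shells start with '<'.
    USER_CODE_PREFIXES = ('<ipython',)
    LIBRARY_CODE_PREFIXES = ('<',)

    def looks_like_user_code(filename):
        # str.startswith accepts a tuple, so a single call tests every prefix.
        if filename.startswith(USER_CODE_PREFIXES):
            return True
        return not filename.startswith(LIBRARY_CODE_PREFIXES)

    print(looks_like_user_code('<ipython-input-1>'))  # True: typed into an IPython cell
    print(looks_like_user_code('<string>'))           # False: synthesized, library-side filename
    print(looks_like_user_code('main.py'))            # True: a regular file on disk
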
+USER_CODE_BASENAMES_STARTING_WITH = (' (2 ** 32) + +IS_JYTHON = pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON + +IS_PYPY = platform.python_implementation() == 'PyPy' + +if IS_JYTHON: + import java.lang.System # @UnresolvedImport + IS_WINDOWS = java.lang.System.getProperty("os.name").lower().startswith("windows") + +USE_CUSTOM_SYS_CURRENT_FRAMES = not hasattr(sys, '_current_frames') or IS_PYPY +USE_CUSTOM_SYS_CURRENT_FRAMES_MAP = USE_CUSTOM_SYS_CURRENT_FRAMES and (IS_PYPY or IS_IRONPYTHON) + +if USE_CUSTOM_SYS_CURRENT_FRAMES: + + # Some versions of Jython don't have it (but we can provide a replacement) + if IS_JYTHON: + from java.lang import NoSuchFieldException + from org.python.core import ThreadStateMapping + try: + cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + except NoSuchFieldException: + cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + cachedThreadState.accessible = True + thread_states = cachedThreadState.get(ThreadStateMapping) + + def _current_frames(): + as_array = thread_states.entrySet().toArray() + ret = {} + for thread_to_state in as_array: + thread = thread_to_state.getKey() + if thread is None: + continue + thread_state = thread_to_state.getValue() + if thread_state is None: + continue + + frame = thread_state.frame + if frame is None: + continue + + ret[thread.getId()] = frame + return ret + + elif USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: + constructed_tid_to_last_frame = {} + + # IronPython doesn't have it. Let's use our workaround... + def _current_frames(): + return constructed_tid_to_last_frame + + else: + raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).') +else: + _current_frames = sys._current_frames + +IS_PYTHON_STACKLESS = "stackless" in sys.version.lower() +CYTHON_SUPPORTED = False + +python_implementation = platform.python_implementation() +if python_implementation == 'CPython': + # Only available for CPython! + CYTHON_SUPPORTED = True + +#======================================================================================================================= +# Python 3? +#======================================================================================================================= +IS_PY36_OR_GREATER = sys.version_info >= (3, 6) +IS_PY37_OR_GREATER = sys.version_info >= (3, 7) +IS_PY38_OR_GREATER = sys.version_info >= (3, 8) +IS_PY39_OR_GREATER = sys.version_info >= (3, 9) +IS_PY310_OR_GREATER = sys.version_info >= (3, 10) +IS_PY311_OR_GREATER = sys.version_info >= (3, 11) + + +def version_str(v): + return '.'.join((str(x) for x in v[:3])) + ''.join((str(x) for x in v[3:])) + + +PY_VERSION_STR = version_str(sys.version_info) +try: + PY_IMPL_VERSION_STR = version_str(sys.implementation.version) +except AttributeError: + PY_IMPL_VERSION_STR = '' + +try: + PY_IMPL_NAME = sys.implementation.name +except AttributeError: + PY_IMPL_NAME = '' + +ENV_TRUE_LOWER_VALUES = ('yes', 'true', '1') +ENV_FALSE_LOWER_VALUES = ('no', 'false', '0') + + +def is_true_in_env(env_key): + if isinstance(env_key, tuple): + # If a tuple, return True if any of those ends up being true. 
+ for v in env_key: + if is_true_in_env(v): + return True + return False + else: + return os.getenv(env_key, '').lower() in ENV_TRUE_LOWER_VALUES + + +def as_float_in_env(env_key, default): + value = os.getenv(env_key) + if value is None: + return default + try: + return float(value) + except Exception: + raise RuntimeError( + 'Error: expected the env variable: %s to be set to a float value. Found: %s' % ( + env_key, value)) + + +def as_int_in_env(env_key, default): + value = os.getenv(env_key) + if value is None: + return default + try: + return int(value) + except Exception: + raise RuntimeError( + 'Error: expected the env variable: %s to be set to a int value. Found: %s' % ( + env_key, value)) + + +# If true in env, use gevent mode. +SUPPORT_GEVENT = is_true_in_env('GEVENT_SUPPORT') + +# Opt-in support to show gevent paused greenlets. False by default because if too many greenlets are +# paused the UI can slow-down (i.e.: if 1000 greenlets are paused, each one would be shown separate +# as a different thread, but if the UI isn't optimized for that the experience is lacking...). +GEVENT_SHOW_PAUSED_GREENLETS = is_true_in_env('GEVENT_SHOW_PAUSED_GREENLETS') + +DISABLE_FILE_VALIDATION = is_true_in_env('PYDEVD_DISABLE_FILE_VALIDATION') + +GEVENT_SUPPORT_NOT_SET_MSG = os.getenv( + 'GEVENT_SUPPORT_NOT_SET_MSG', + 'It seems that the gevent monkey-patching is being used.\n' + 'Please set an environment variable with:\n' + 'GEVENT_SUPPORT=True\n' + 'to enable gevent support in the debugger.' +) + +USE_LIB_COPY = SUPPORT_GEVENT + +INTERACTIVE_MODE_AVAILABLE = sys.platform in ('darwin', 'win32') or os.getenv('DISPLAY') is not None + +# If true in env, forces cython to be used (raises error if not available). +# If false in env, disables it. +# If not specified, uses default heuristic to determine if it should be loaded. +USE_CYTHON_FLAG = os.getenv('PYDEVD_USE_CYTHON') + +if USE_CYTHON_FLAG is not None: + USE_CYTHON_FLAG = USE_CYTHON_FLAG.lower() + if USE_CYTHON_FLAG not in ENV_TRUE_LOWER_VALUES and USE_CYTHON_FLAG not in ENV_FALSE_LOWER_VALUES: + raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (enable with one of: %s, disable with one of: %s)' % ( + USE_CYTHON_FLAG, ENV_TRUE_LOWER_VALUES, ENV_FALSE_LOWER_VALUES)) + +else: + if not CYTHON_SUPPORTED: + USE_CYTHON_FLAG = 'no' + +# If true in env, forces frame eval to be used (raises error if not available). +# If false in env, disables it. +# If not specified, uses default heuristic to determine if it should be loaded. +PYDEVD_USE_FRAME_EVAL = os.getenv('PYDEVD_USE_FRAME_EVAL', '').lower() + +PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING = is_true_in_env('PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING') + +# If specified in PYDEVD_IPYTHON_CONTEXT it must be a string with the basename +# and then the name of 2 methods in which the evaluate is done. +PYDEVD_IPYTHON_CONTEXT = ('interactiveshell.py', 'run_code', 'run_ast_nodes') +_ipython_ctx = os.getenv('PYDEVD_IPYTHON_CONTEXT') +if _ipython_ctx: + PYDEVD_IPYTHON_CONTEXT = tuple(x.strip() for x in _ipython_ctx.split(',')) + assert len(PYDEVD_IPYTHON_CONTEXT) == 3, 'Invalid PYDEVD_IPYTHON_CONTEXT: %s' % (_ipython_ctx,) + +# Use to disable loading the lib to set tracing to all threads (default is using heuristics based on where we're running). 
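Nearly all of these switches go through the same small pattern: read an environment variable, fall back to a default when it is unset, and raise a clear error when the value cannot be parsed. A compact standalone version of that pattern (helper names are illustrative):

    import os

    TRUE_VALUES = ('yes', 'true', '1')

    def flag_from_env(name, default=False):
        value = os.getenv(name)
        if value is None:
            return default
        return value.strip().lower() in TRUE_VALUES

    def float_from_env(name, default):
        value = os.getenv(name)
        if value is None:
            return default
        try:
            return float(value)
        except ValueError:
            raise RuntimeError('Expected %s to be a float, found: %r' % (name, value))

    # With PYDEVD_WARN_EVALUATION_TIMEOUT=2.5 in the environment this yields 2.5, else 3.0.
    WARN_EVALUATION_TIMEOUT = float_from_env('PYDEVD_WARN_EVALUATION_TIMEOUT', 3.0)
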
+LOAD_NATIVE_LIB_FLAG = os.getenv('PYDEVD_LOAD_NATIVE_LIB', '').lower() + +LOG_TIME = os.getenv('PYDEVD_LOG_TIME', 'true').lower() in ENV_TRUE_LOWER_VALUES + +SHOW_COMPILE_CYTHON_COMMAND_LINE = is_true_in_env('PYDEVD_SHOW_COMPILE_CYTHON_COMMAND_LINE') + +LOAD_VALUES_ASYNC = is_true_in_env('PYDEVD_LOAD_VALUES_ASYNC') +DEFAULT_VALUE = "__pydevd_value_async" +ASYNC_EVAL_TIMEOUT_SEC = 60 +NEXT_VALUE_SEPARATOR = "__pydev_val__" +BUILTINS_MODULE_NAME = 'builtins' +SHOW_DEBUG_INFO_ENV = is_true_in_env(('PYCHARM_DEBUG', 'PYDEV_DEBUG', 'PYDEVD_DEBUG')) + +# Pandas customization. +PANDAS_MAX_ROWS = as_int_in_env('PYDEVD_PANDAS_MAX_ROWS', 60) +PANDAS_MAX_COLS = as_int_in_env('PYDEVD_PANDAS_MAX_COLS', 10) +PANDAS_MAX_COLWIDTH = as_int_in_env('PYDEVD_PANDAS_MAX_COLWIDTH', 50) + +# If getting an attribute or computing some value is too slow, let the user know if the given timeout elapses. +PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT = as_float_in_env('PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT', 0.15) + +# This timeout is used to track the time to send a message saying that the evaluation +# is taking too long and possible mitigations. +PYDEVD_WARN_EVALUATION_TIMEOUT = as_float_in_env('PYDEVD_WARN_EVALUATION_TIMEOUT', 3.) + +# If True in env shows a thread dump when the evaluation times out. +PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT = is_true_in_env('PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT') + +# This timeout is used only when the mode that all threads are stopped/resumed at once is used +# (i.e.: multi_threads_single_notification) +# +# In this mode, if some evaluation doesn't finish until this timeout, we notify the user +# and then resume all threads until the evaluation finishes. +# +# A negative value will disable the timeout and a value of 0 will automatically run all threads +# (without any notification) when the evaluation is started and pause all threads when the +# evaluation is finished. A positive value will run run all threads after the timeout +# elapses. +PYDEVD_UNBLOCK_THREADS_TIMEOUT = as_float_in_env('PYDEVD_UNBLOCK_THREADS_TIMEOUT', -1.) + +# Timeout to interrupt a thread (so, if some evaluation doesn't finish until this +# timeout, the thread doing the evaluation is interrupted). +# A value <= 0 means this is disabled. +# See: _pydevd_bundle.pydevd_timeout.create_interrupt_this_thread_callback for details +# on how the thread interruption works (there are some caveats related to it). +PYDEVD_INTERRUPT_THREAD_TIMEOUT = as_float_in_env('PYDEVD_INTERRUPT_THREAD_TIMEOUT', -1) + +# If PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS is set to False, the patching to hide pydevd threads won't be applied. +PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS = os.getenv('PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS', 'true').lower() in ENV_TRUE_LOWER_VALUES + +EXCEPTION_TYPE_UNHANDLED = 'UNHANDLED' +EXCEPTION_TYPE_USER_UNHANDLED = 'USER_UNHANDLED' +EXCEPTION_TYPE_HANDLED = 'HANDLED' + +if SHOW_DEBUG_INFO_ENV: + # show debug info before the debugger start + DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True + DebugInfoHolder.DEBUG_TRACE_LEVEL = 3 + DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 1 + +DebugInfoHolder.PYDEVD_DEBUG_FILE = os.getenv('PYDEVD_DEBUG_FILE') + + +def protect_libraries_from_patching(): + """ + In this function we delete some modules from `sys.modules` dictionary and import them again inside + `_pydev_saved_modules` in order to save their original copies there. After that we can use these + saved modules within the debugger to protect them from patching by external libraries (e.g. gevent). 
+ """ + patched = ['threading', 'thread', '_thread', 'time', 'socket', 'queue', 'select', + 'xmlrpclib', 'SimpleXMLRPCServer', 'BaseHTTPServer', 'SocketServer', + 'xmlrpc.client', 'xmlrpc.server', 'http.server', 'socketserver'] + + for name in patched: + try: + __import__(name) + except: + pass + + patched_modules = dict([(k, v) for k, v in sys.modules.items() + if k in patched]) + + for name in patched_modules: + del sys.modules[name] + + # import for side effects + import _pydev_bundle._pydev_saved_modules + + for name in patched_modules: + sys.modules[name] = patched_modules[name] + + +if USE_LIB_COPY: + protect_libraries_from_patching() + +from _pydev_bundle._pydev_saved_modules import thread, threading + +_fork_safe_locks = [] + +if IS_JYTHON: + + def ForkSafeLock(rlock=False): + if rlock: + return threading.RLock() + else: + return threading.Lock() + +else: + + class ForkSafeLock(object): + ''' + A lock which is fork-safe (when a fork is done, `pydevd_constants.after_fork()` + should be called to reset the locks in the new process to avoid deadlocks + from a lock which was locked during the fork). + + Note: + Unlike `threading.Lock` this class is not completely atomic, so, doing: + + lock = ForkSafeLock() + with lock: + ... + + is different than using `threading.Lock` directly because the tracing may + find an additional function call on `__enter__` and on `__exit__`, so, it's + not recommended to use this in all places, only where the forking may be important + (so, for instance, the locks on PyDB should not be changed to this lock because + of that -- and those should all be collected in the new process because PyDB itself + should be completely cleared anyways). + + It's possible to overcome this limitation by using `ForkSafeLock.acquire` and + `ForkSafeLock.release` instead of the context manager (as acquire/release are + bound to the original implementation, whereas __enter__/__exit__ is not due to Python + limitations). + ''' + + def __init__(self, rlock=False): + self._rlock = rlock + self._init() + _fork_safe_locks.append(weakref.ref(self)) + + def __enter__(self): + return self._lock.__enter__() + + def __exit__(self, exc_type, exc_val, exc_tb): + return self._lock.__exit__(exc_type, exc_val, exc_tb) + + def _init(self): + if self._rlock: + self._lock = threading.RLock() + else: + self._lock = thread.allocate_lock() + + self.acquire = self._lock.acquire + self.release = self._lock.release + _fork_safe_locks.append(weakref.ref(self)) + + +def after_fork(): + ''' + Must be called after a fork operation (will reset the ForkSafeLock). + ''' + global _fork_safe_locks + locks = _fork_safe_locks[:] + _fork_safe_locks = [] + for lock in locks: + lock = lock() + if lock is not None: + lock._init() + + +_thread_id_lock = ForkSafeLock() +thread_get_ident = thread.get_ident + + +def as_str(s): + assert isinstance(s, str) + return s + + +@contextmanager +def filter_all_warnings(): + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + yield + + +def silence_warnings_decorator(func): + + @functools.wraps(func) + def new_func(*args, **kwargs): + with filter_all_warnings(): + return func(*args, **kwargs) + + return new_func + + +def sorted_dict_repr(d): + s = sorted(d.items(), key=lambda x:str(x[0])) + return '{' + ', '.join(('%r: %r' % x) for x in s) + '}' + + +def iter_chars(b): + # In Python 2, we can iterate bytes or str with individual characters, but Python 3 onwards + # changed that behavior so that when iterating bytes we actually get ints! 
+ if isinstance(b, bytes): + # i.e.: do something as struct.unpack('3c', b) + return iter(struct.unpack(str(len(b)) + 'c', b)) + return iter(b) + + +if IS_JYTHON: + + def NO_FTRACE(frame, event, arg): + return None + +else: + _curr_trace = sys.gettrace() + + # Set a temporary trace which does nothing for us to test (otherwise setting frame.f_trace has no + # effect). + def _temp_trace(frame, event, arg): + return None + + sys.settrace(_temp_trace) + + def _check_ftrace_set_none(): + ''' + Will throw an error when executing a line event + ''' + sys._getframe().f_trace = None + _line_event = 1 + _line_event = 2 + + try: + _check_ftrace_set_none() + + def NO_FTRACE(frame, event, arg): + frame.f_trace = None + return None + + except TypeError: + + def NO_FTRACE(frame, event, arg): + # In Python <= 2.6 and <= 3.4, if we're tracing a method, frame.f_trace may not be set + # to None, it must always be set to a tracing function. + # See: tests_python.test_tracing_gotchas.test_tracing_gotchas + # + # Note: Python 2.7 sometimes works and sometimes it doesn't depending on the minor + # version because of https://bugs.python.org/issue20041 (although bug reports didn't + # include the minor version, so, mark for any Python 2.7 as I'm not completely sure + # the fix in later 2.7 versions is the same one we're dealing with). + return None + + sys.settrace(_curr_trace) + + +#======================================================================================================================= +# get_pid +#======================================================================================================================= +def get_pid(): + try: + return os.getpid() + except AttributeError: + try: + # Jython does not have it! + import java.lang.management.ManagementFactory # @UnresolvedImport -- just for jython + pid = java.lang.management.ManagementFactory.getRuntimeMXBean().getName() + return pid.replace('@', '_') + except: + # ok, no pid available (will be unable to debug multiple processes) + return '000001' + + +def clear_cached_thread_id(thread): + with _thread_id_lock: + try: + if thread.__pydevd_id__ != 'console_main': + # The console_main is a special thread id used in the console and its id should never be reset + # (otherwise we may no longer be able to get its variables -- see: https://www.brainwy.com/tracker/PyDev/776). + del thread.__pydevd_id__ + except AttributeError: + pass + + +# Don't let threads be collected (so that id(thread) is guaranteed to be unique). +_thread_id_to_thread_found = {} + + +def _get_or_compute_thread_id_with_lock(thread, is_current_thread): + with _thread_id_lock: + # We do a new check with the lock in place just to be sure that nothing changed + tid = getattr(thread, '__pydevd_id__', None) + if tid is not None: + return tid + + _thread_id_to_thread_found[id(thread)] = thread + + # Note: don't use thread.ident because a new thread may have the + # same id from an old thread. + pid = get_pid() + tid = 'pid_%s_id_%s' % (pid, id(thread)) + + thread.__pydevd_id__ = tid + + return tid + + +def get_current_thread_id(thread): + ''' + Note: the difference from get_current_thread_id to get_thread_id is that + for the current thread we can get the thread id while the thread.ident + is still not set in the Thread instance. + ''' + try: + # Fast path without getting lock. + tid = thread.__pydevd_id__ + if tid is None: + # Fix for https://www.brainwy.com/tracker/PyDev/645 + # if __pydevd_id__ is None, recalculate it... 
also, use an heuristic + # that gives us always the same id for the thread (using thread.ident or id(thread)). + raise AttributeError() + except AttributeError: + tid = _get_or_compute_thread_id_with_lock(thread, is_current_thread=True) + + return tid + + +def get_thread_id(thread): + try: + # Fast path without getting lock. + tid = thread.__pydevd_id__ + if tid is None: + # Fix for https://www.brainwy.com/tracker/PyDev/645 + # if __pydevd_id__ is None, recalculate it... also, use an heuristic + # that gives us always the same id for the thread (using thread.ident or id(thread)). + raise AttributeError() + except AttributeError: + tid = _get_or_compute_thread_id_with_lock(thread, is_current_thread=False) + + return tid + + +def set_thread_id(thread, thread_id): + with _thread_id_lock: + thread.__pydevd_id__ = thread_id + + +#======================================================================================================================= +# Null +#======================================================================================================================= +class Null: + """ + Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205 + """ + + def __init__(self, *args, **kwargs): + return None + + def __call__(self, *args, **kwargs): + return self + + def __enter__(self, *args, **kwargs): + return self + + def __exit__(self, *args, **kwargs): + return self + + def __getattr__(self, mname): + if len(mname) > 4 and mname[:2] == '__' and mname[-2:] == '__': + # Don't pretend to implement special method names. + raise AttributeError(mname) + return self + + def __setattr__(self, name, value): + return self + + def __delattr__(self, name): + return self + + def __repr__(self): + return "" + + def __str__(self): + return "Null" + + def __len__(self): + return 0 + + def __getitem__(self): + return self + + def __setitem__(self, *args, **kwargs): + pass + + def write(self, *args, **kwargs): + pass + + def __nonzero__(self): + return 0 + + def __iter__(self): + return iter(()) + + +# Default instance +NULL = Null() + + +class KeyifyList(object): + + def __init__(self, inner, key): + self.inner = inner + self.key = key + + def __len__(self): + return len(self.inner) + + def __getitem__(self, k): + return self.key(self.inner[k]) + + +def call_only_once(func): + ''' + To be used as a decorator + + @call_only_once + def func(): + print 'Calling func only this time' + + Actually, in PyDev it must be called as: + + func = call_only_once(func) to support older versions of Python. + ''' + + def new_func(*args, **kwargs): + if not new_func._called: + new_func._called = True + return func(*args, **kwargs) + + new_func._called = False + return new_func + + +# Protocol where each line is a new message (text is quoted to prevent new lines). +# payload is xml +QUOTED_LINE_PROTOCOL = 'quoted-line' +ARGUMENT_QUOTED_LINE_PROTOCOL = 'protocol-quoted-line' + +# Uses http protocol to provide a new message. +# i.e.: Content-Length:xxx\r\n\r\npayload +# payload is xml +HTTP_PROTOCOL = 'http' +ARGUMENT_HTTP_PROTOCOL = 'protocol-http' + +# Message is sent without any header. 
+# payload is json +JSON_PROTOCOL = 'json' +ARGUMENT_JSON_PROTOCOL = 'json-dap' + +# Same header as the HTTP_PROTOCOL +# payload is json +HTTP_JSON_PROTOCOL = 'http_json' +ARGUMENT_HTTP_JSON_PROTOCOL = 'json-dap-http' + +ARGUMENT_PPID = 'ppid' + + +class _GlobalSettings: + protocol = QUOTED_LINE_PROTOCOL + + +def set_protocol(protocol): + expected = (HTTP_PROTOCOL, QUOTED_LINE_PROTOCOL, JSON_PROTOCOL, HTTP_JSON_PROTOCOL) + assert protocol in expected, 'Protocol (%s) should be one of: %s' % ( + protocol, expected) + + _GlobalSettings.protocol = protocol + + +def get_protocol(): + return _GlobalSettings.protocol + + +def is_json_protocol(): + return _GlobalSettings.protocol in (JSON_PROTOCOL, HTTP_JSON_PROTOCOL) + + +class GlobalDebuggerHolder: + ''' + Holder for the global debugger. + ''' + global_dbg = None # Note: don't rename (the name is used in our attach to process) + + +def get_global_debugger(): + return GlobalDebuggerHolder.global_dbg + + +GetGlobalDebugger = get_global_debugger # Backward-compatibility + + +def set_global_debugger(dbg): + GlobalDebuggerHolder.global_dbg = dbg + + +if __name__ == '__main__': + if Null(): + sys.stdout.write('here\n') + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py new file mode 100644 index 0000000..66e400f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py @@ -0,0 +1,116 @@ +from _pydevd_bundle.pydevd_constants import get_current_thread_id, Null, ForkSafeLock +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +from _pydev_bundle._pydev_saved_modules import thread, threading +import sys +from _pydev_bundle import pydev_log + +DEBUG = False + + +class CustomFramesContainer: + + # Actual Values initialized later on. + custom_frames_lock = None # : :type custom_frames_lock: threading.Lock + + custom_frames = None + + _next_frame_id = None + + _py_db_command_thread_event = None + + +def custom_frames_container_init(): # Note: no staticmethod on jython 2.1 (so, use free-function) + + CustomFramesContainer.custom_frames_lock = ForkSafeLock() + + # custom_frames can only be accessed if properly locked with custom_frames_lock! + # Key is a string identifying the frame (as well as the thread it belongs to). + # Value is a CustomFrame. + # + CustomFramesContainer.custom_frames = {} + + # Only to be used in this module + CustomFramesContainer._next_frame_id = 0 + + # This is the event we must set to release an internal process events. It's later set by the actual debugger + # when we do create the debugger. + CustomFramesContainer._py_db_command_thread_event = Null() + + +# Initialize it the first time (it may be reinitialized later on when dealing with a fork). +custom_frames_container_init() + + +class CustomFrame: + + def __init__(self, name, frame, thread_id): + # 0 = string with the representation of that frame + self.name = name + + # 1 = the frame to show + self.frame = frame + + # 2 = an integer identifying the last time the frame was changed. + self.mod_time = 0 + + # 3 = the thread id of the given frame + self.thread_id = thread_id + + +def add_custom_frame(frame, name, thread_id): + ''' + It's possible to show paused frames by adding a custom frame through this API (it's + intended to be used for coroutines, but could potentially be used for generators too). 
+ + :param frame: + The topmost frame to be shown paused when a thread with thread.ident == thread_id is paused. + + :param name: + The name to be shown for the custom thread in the UI. + + :param thread_id: + The thread id to which this frame is related (must match thread.ident). + + :return: str + Returns the custom thread id which will be used to show the given frame paused. + ''' + with CustomFramesContainer.custom_frames_lock: + curr_thread_id = get_current_thread_id(threading.current_thread()) + next_id = CustomFramesContainer._next_frame_id = CustomFramesContainer._next_frame_id + 1 + + # Note: the frame id kept contains an id and thread information on the thread where the frame was added + # so that later on we can check if the frame is from the current thread by doing frame_id.endswith('|'+thread_id). + frame_custom_thread_id = '__frame__:%s|%s' % (next_id, curr_thread_id) + if DEBUG: + sys.stderr.write('add_custom_frame: %s (%s) %s %s\n' % ( + frame_custom_thread_id, get_abs_path_real_path_and_base_from_frame(frame)[-1], frame.f_lineno, frame.f_code.co_name)) + + CustomFramesContainer.custom_frames[frame_custom_thread_id] = CustomFrame(name, frame, thread_id) + CustomFramesContainer._py_db_command_thread_event.set() + return frame_custom_thread_id + + +def update_custom_frame(frame_custom_thread_id, frame, thread_id, name=None): + with CustomFramesContainer.custom_frames_lock: + if DEBUG: + sys.stderr.write('update_custom_frame: %s\n' % frame_custom_thread_id) + try: + old = CustomFramesContainer.custom_frames[frame_custom_thread_id] + if name is not None: + old.name = name + old.mod_time += 1 + old.thread_id = thread_id + except: + sys.stderr.write('Unable to get frame to replace: %s\n' % (frame_custom_thread_id,)) + pydev_log.exception() + + CustomFramesContainer._py_db_command_thread_event.set() + + +def remove_custom_frame(frame_custom_thread_id): + with CustomFramesContainer.custom_frames_lock: + if DEBUG: + sys.stderr.write('remove_custom_frame: %s\n' % frame_custom_thread_id) + CustomFramesContainer.custom_frames.pop(frame_custom_thread_id, None) + CustomFramesContainer._py_db_command_thread_event.set() + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c new file mode 100644 index 0000000..e50c22c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c @@ -0,0 +1,43164 @@ +/* Generated by Cython 0.29.32 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [], + "name": "_pydevd_bundle.pydevd_cython", + "sources": [ + "_pydevd_bundle/pydevd_cython.pyx" + ] + }, + "module_name": "_pydevd_bundle.pydevd_cython" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#include "Python.h" +#if PY_VERSION_HEX >= 0x03090000 +#include "internal/pycore_gc.h" +#include "internal/pycore_interp.h" +#endif + +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
+#else +#define CYTHON_ABI "0_29_32" +#define CYTHON_HEX_VERSION 0x001D20F0 +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) + #endif +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define 
CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif +#elif defined(PY_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #elif !defined(CYTHON_FAST_THREAD_STATE) + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030A0000) + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef 
CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else 
+ #define CYTHON_INLINE + #endif +#endif + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if (!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } +#else + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, 
name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define 
__Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if defined(PyUnicode_IS_READY) + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #else + #define __Pyx_PyUnicode_READY(op) (0) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define 
PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE___pydevd_bundle__pydevd_cython +#define __PYX_HAVE_API___pydevd_bundle__pydevd_cython +/* Early includes */ +#include +#include +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == 
(type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode 
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", 
NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "_pydevd_bundle/pydevd_cython.pyx", + "_pydevd_bundle/pydevd_cython.pxd", + "stringsource", + "type.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer; + +/* "_pydevd_bundle/pydevd_cython.pxd":1 + * cdef class PyDBAdditionalThreadInfo: # <<<<<<<<<<<<<< + * cdef public int pydev_state + * cdef public object pydev_step_stop # Actually, it's a frame or None + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo { + PyObject_HEAD + int pydev_state; + PyObject *pydev_step_stop; + int pydev_original_step_cmd; + int pydev_step_cmd; + int pydev_notify_kill; + PyObject *pydev_smart_step_stop; + int pydev_django_resolve_frame; + PyObject *pydev_call_from_jinja2; + PyObject *pydev_call_inside_jinja2; + int is_tracing; + PyObject *conditional_breakpoint_exception; + PyObject *pydev_message; + int suspend_type; + int pydev_next_line; + PyObject *pydev_func_name; + int suspended_at_unhandled; + PyObject *trace_suspend_type; + PyObject *top_level_thread_tracer_no_back_frames; + PyObject *top_level_thread_tracer_unhandled; + PyObject *thread_tracer; + PyObject *step_in_initial_location; + int pydev_smart_parent_offset; + int pydev_smart_child_offset; + PyObject *pydev_smart_step_into_variants; + PyObject *target_id_to_smart_step_into_variant; + int pydev_use_scoped_step_frame; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":255 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class _TryExceptContainerObj: # <<<<<<<<<<<<<< + * cdef public list try_except_infos; + * def __init__(self): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj { + PyObject_HEAD + PyObject *try_except_infos; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":273 + * 
#======================================================================================================================= + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class PyDBFrame: # <<<<<<<<<<<<<< + * # ELSE + * # class PyDBFrame: + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame { + PyObject_HEAD + struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_vtab; + PyObject *_args; + int should_skip; + PyObject *exc_info; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":1434 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class SafeCallWrapper: # <<<<<<<<<<<<<< + * cdef method_object + * def __init__(self, method_object): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper { + PyObject_HEAD + PyObject *method_object; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":1590 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class TopLevelThreadTracerOnlyUnhandledExceptions: # <<<<<<<<<<<<<< + * cdef public tuple _args; + * def __init__(self, tuple args): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions { + PyObject_HEAD + PyObject *_args; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":1620 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class TopLevelThreadTracerNoBackFrame: # <<<<<<<<<<<<<< + * cdef public object _frame_trace_dispatch; + * cdef public tuple _args; + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame { + PyObject_HEAD + PyObject *_frame_trace_dispatch; + PyObject *_args; + PyObject *try_except_infos; + PyObject *_last_exc_arg; + PyObject *_raise_lines; + int _last_raise_line; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":1695 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class ThreadTracer: # <<<<<<<<<<<<<< + * cdef public tuple _args; + * def __init__(self, tuple args): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer { + PyObject_HEAD + PyObject *_args; +}; + + + +/* "_pydevd_bundle/pydevd_cython.pyx":273 + * #======================================================================================================================= + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class PyDBFrame: # <<<<<<<<<<<<<< + * # ELSE + * # class PyDBFrame: + */ + +struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame { + PyObject *(*_should_stop_on_exception)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *); + PyObject *(*_handle_exception)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *, PyObject *); + PyObject *(*get_func_name)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *); + PyObject *(*_show_return_values)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *); + PyObject *(*_remove_return_values)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *); + PyObject *(*_get_unfiltered_back_frame)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *); + PyObject *(*_is_same_frame)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *); + PyObject *(*trace_dispatch)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, 
PyObject *, PyObject *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* KeywordStringCheck.proto */ +static int 
__Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if CYTHON_FAST_PYCALL + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = 
((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif // CYTHON_FAST_PYCALL +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectCall2Args.proto */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) 
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* PyObjectLookupSpecial.proto */ +#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) { + PyObject *res; + PyTypeObject *tp = Py_TYPE(obj); +#if PY_MAJOR_VERSION < 3 + if (unlikely(PyInstance_Check(obj))) + return __Pyx_PyObject_GetAttrStr(obj, attr_name); +#endif + res = _PyType_Lookup(tp, attr_name); + if (likely(res)) { + descrgetfunc f = Py_TYPE(res)->tp_descr_get; + if (!f) { + Py_INCREF(res); + } else { + res = f(res, obj, (PyObject *)tp); + } + } else { + PyErr_SetObject(PyExc_AttributeError, attr_name); + } + return res; +} +#else +#define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) +#endif + +/* PyObjectSetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL) +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value); +#else +#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) +#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) +#endif + +/* None.proto */ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname); + +/* pyfrozenset_new.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it); + +/* PySetContains.proto */ +static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq); + +/* ListAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + 
__Pyx_SET_SIZE(list, len + 1); + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) +#endif + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* StrEquals.proto */ +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals +#else +#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals +#endif + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *,
PyObject *); + +/* dict_getitem_default.proto */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod1.proto */ +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/* CallUnboundCMethod2.proto */ +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); +#else +#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) +#endif + +/* py_dict_clear.proto */ +#define __Pyx_PyDict_Clear(d) (PyDict_Clear(d), 0) + +/* PyDictContains.proto */ +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* PyIntBinop.proto */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AndObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check); +#else +#define __Pyx_PyInt_AndObjC(op1, op2, intval, inplace, zerodivision_check)\ + (inplace ? 
PyNumber_InPlaceAnd(op1, op2) : PyNumber_And(op1, op2)) +#endif + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod0.proto */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); + +/* UnpackTupleError.proto */ +static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); + +/* UnpackTuple2.proto */ +#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple)\ + (likely(is_tuple || PyTuple_Check(tuple)) ?\ + (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ?\ + __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) :\ + (__Pyx_UnpackTupleError(tuple, 2), -1)) :\ + __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple)) +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple); +static int __Pyx_unpack_tuple2_generic( + PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple); + +/* dict_iter.proto */ +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_is_dict); +static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); + +/* py_dict_values.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d); + +/* CallUnboundCMethod0.proto */ +static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CallUnboundCMethod0(cfunc, self)\ + (likely((cfunc)->func) ?\ + (likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) :\ + (PY_VERSION_HEX >= 0x030600B1 && likely((cfunc)->flag == METH_FASTCALL) ?\ + (PY_VERSION_HEX >= 0x030700A0 ?\ + (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0) :\ + (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0, NULL)) :\ + (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ?\ + (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0, NULL) :\ + (likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, __pyx_empty_tuple, NULL)) :\ + ((cfunc)->flag == METH_VARARGS ? 
(*((cfunc)->func))(self, __pyx_empty_tuple) :\ + __Pyx__CallUnboundCMethod0(cfunc, self)))))) :\ + __Pyx__CallUnboundCMethod0(cfunc, self)) +#else +#define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self) +#endif + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* SliceObject.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( + PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** py_start, PyObject** py_stop, PyObject** py_slice, + int has_cstart, int has_cstop, int wraparound); + +/* PyIntBinop.proto */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check); +#else +#define __Pyx_PyInt_AddObjC(op1, op2, intval, inplace, zerodivision_check)\ + (inplace ? PyNumber_InPlaceAdd(op1, op2) : PyNumber_Add(op1, op2)) +#endif + +/* PyObjectCallMethod1.proto */ +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); + +/* append.proto */ +static CYTHON_INLINE int __Pyx_PyObject_Append(PyObject* L, PyObject* x); + +/* SliceTupleAndList.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyList_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop); +static CYTHON_INLINE PyObject* __Pyx_PyTuple_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop); +#else +#define __Pyx_PyList_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop) +#define __Pyx_PyTuple_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop) +#endif + +/* PyIntCompare.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, long inplace); + +/* ObjectGetItem.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key); +#else +#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) +#endif + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* type_obj); + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define 
__PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__should_stop_on_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, CYTHON_UNUSED PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__handle_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg, PyObject *__pyx_v_exception_type); /* proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_get_func_name(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame); /* 
proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__show_return_values(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_arg); /* proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__remove_return_values(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_main_debugger, PyObject *__pyx_v_frame); /* proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__get_unfiltered_back_frame(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_main_debugger, PyObject *__pyx_v_frame); /* proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__is_same_frame(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_target_frame, PyObject *__pyx_v_current_frame); /* proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg, int __pyx_skip_dispatch); /* proto*/ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from '_pydevd_bundle.pydevd_cython' */ +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer = 0; +static PyObject *__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in = 0; +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_is_unhandled_exception(PyObject *, PyObject *, PyObject *, int, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle__TryExceptContainerObj__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *, PyObject *); /*proto*/ +static PyObject 
*__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *, PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "_pydevd_bundle.pydevd_cython" +extern int __pyx_module_is_main__pydevd_bundle__pydevd_cython; +int __pyx_module_is_main__pydevd_bundle__pydevd_cython = 0; + +/* Implementation of '_pydevd_bundle.pydevd_cython' */ +static PyObject *__pyx_builtin_ImportError; +static PyObject *__pyx_builtin_NameError; +static PyObject *__pyx_builtin_StopIteration; +static PyObject *__pyx_builtin_id; +static PyObject *__pyx_builtin_AttributeError; +static PyObject *__pyx_builtin_SystemExit; +static PyObject *__pyx_builtin_GeneratorExit; +static PyObject *__pyx_builtin_KeyboardInterrupt; +static const char __pyx_k_[] = ""; +static const char __pyx_k_1[] = "1"; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_j[] = "j"; +static const char __pyx_k_t[] = "t"; +static const char __pyx_k__3[] = "?"; +static const char __pyx_k__5[] = "/"; +static const char __pyx_k__6[] = "\\"; +static const char __pyx_k__7[] = "."; +static const char __pyx_k_id[] = "id"; +static const char __pyx_k_os[] = "os"; +static const char __pyx_k_re[] = "re"; +static const char __pyx_k_ALL[] = "ALL"; +static const char __pyx_k_add[] = "add"; +static const char __pyx_k_arg[] = "arg"; +static const char __pyx_k_dis[] = "dis"; +static const char __pyx_k_get[] = "get"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_pop[] = "pop"; +static const char __pyx_k_pyc[] = ".pyc"; +static const char __pyx_k_run[] = "run"; +static const char __pyx_k_s_s[] = "%s.%s"; +static const char __pyx_k_None[] = "None"; +static const char __pyx_k_args[] = "args"; +static const char __pyx_k_call[] = "call"; +static const char __pyx_k_cell[] = " 0)) { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 0))) return -1; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":68 + * + * def __init__(self): + * self.pydev_state = STATE_RUN # STATE_RUN or STATE_SUSPEND # <<<<<<<<<<<<<< + * self.pydev_step_stop = None + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_STATE_RUN); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->pydev_state = __pyx_t_2; + + /* "_pydevd_bundle/pydevd_cython.pyx":69 + * def __init__(self): + * self.pydev_state = STATE_RUN # STATE_RUN or STATE_SUSPEND + * self.pydev_step_stop = None # <<<<<<<<<<<<<< + * + * # Note: we have `pydev_original_step_cmd` and `pydev_step_cmd` because the original is to + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_step_stop); + __pyx_v_self->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":77 + * # method the strategy is changed to a step in). + * + * self.pydev_original_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. # <<<<<<<<<<<<<< + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + * + */ + __pyx_v_self->pydev_original_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":78 + * + * self.pydev_original_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. # <<<<<<<<<<<<<< + * + * self.pydev_notify_kill = False + */ + __pyx_v_self->pydev_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":80 + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + * + * self.pydev_notify_kill = False # <<<<<<<<<<<<<< + * self.pydev_django_resolve_frame = False + * self.pydev_call_from_jinja2 = None + */ + __pyx_v_self->pydev_notify_kill = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":81 + * + * self.pydev_notify_kill = False + * self.pydev_django_resolve_frame = False # <<<<<<<<<<<<<< + * self.pydev_call_from_jinja2 = None + * self.pydev_call_inside_jinja2 = None + */ + __pyx_v_self->pydev_django_resolve_frame = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":82 + * self.pydev_notify_kill = False + * self.pydev_django_resolve_frame = False + * self.pydev_call_from_jinja2 = None # <<<<<<<<<<<<<< + * self.pydev_call_inside_jinja2 = None + * self.is_tracing = 0 + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_v_self->pydev_call_from_jinja2 = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":83 + * self.pydev_django_resolve_frame = False + * self.pydev_call_from_jinja2 = None + * self.pydev_call_inside_jinja2 = None # <<<<<<<<<<<<<< + * self.is_tracing = 0 + * self.conditional_breakpoint_exception = None + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_v_self->pydev_call_inside_jinja2 = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":84 + * self.pydev_call_from_jinja2 = None + * self.pydev_call_inside_jinja2 = None + * self.is_tracing = 0 # <<<<<<<<<<<<<< + * self.conditional_breakpoint_exception = None + * self.pydev_message = '' + */ + __pyx_v_self->is_tracing = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":85 + * self.pydev_call_inside_jinja2 = None + * self.is_tracing = 0 + * self.conditional_breakpoint_exception = None # <<<<<<<<<<<<<< + * self.pydev_message = '' + * self.suspend_type = PYTHON_SUSPEND + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->conditional_breakpoint_exception); + 
__Pyx_DECREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_v_self->conditional_breakpoint_exception = ((PyObject*)Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":86 + * self.is_tracing = 0 + * self.conditional_breakpoint_exception = None + * self.pydev_message = '' # <<<<<<<<<<<<<< + * self.suspend_type = PYTHON_SUSPEND + * self.pydev_next_line = -1 + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_GIVEREF(__pyx_kp_s_); + __Pyx_GOTREF(__pyx_v_self->pydev_message); + __Pyx_DECREF(__pyx_v_self->pydev_message); + __pyx_v_self->pydev_message = __pyx_kp_s_; + + /* "_pydevd_bundle/pydevd_cython.pyx":87 + * self.conditional_breakpoint_exception = None + * self.pydev_message = '' + * self.suspend_type = PYTHON_SUSPEND # <<<<<<<<<<<<<< + * self.pydev_next_line = -1 + * self.pydev_func_name = '.invalid.' # Must match the type in cython + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_PYTHON_SUSPEND); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 87, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 87, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->suspend_type = __pyx_t_2; + + /* "_pydevd_bundle/pydevd_cython.pyx":88 + * self.pydev_message = '' + * self.suspend_type = PYTHON_SUSPEND + * self.pydev_next_line = -1 # <<<<<<<<<<<<<< + * self.pydev_func_name = '.invalid.' # Must match the type in cython + * self.suspended_at_unhandled = False + */ + __pyx_v_self->pydev_next_line = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":89 + * self.suspend_type = PYTHON_SUSPEND + * self.pydev_next_line = -1 + * self.pydev_func_name = '.invalid.' # Must match the type in cython # <<<<<<<<<<<<<< + * self.suspended_at_unhandled = False + * self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' + */ + __Pyx_INCREF(__pyx_kp_s_invalid); + __Pyx_GIVEREF(__pyx_kp_s_invalid); + __Pyx_GOTREF(__pyx_v_self->pydev_func_name); + __Pyx_DECREF(__pyx_v_self->pydev_func_name); + __pyx_v_self->pydev_func_name = __pyx_kp_s_invalid; + + /* "_pydevd_bundle/pydevd_cython.pyx":90 + * self.pydev_next_line = -1 + * self.pydev_func_name = '.invalid.' # Must match the type in cython + * self.suspended_at_unhandled = False # <<<<<<<<<<<<<< + * self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' + * self.top_level_thread_tracer_no_back_frames = [] + */ + __pyx_v_self->suspended_at_unhandled = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":91 + * self.pydev_func_name = '.invalid.' 
# Must match the type in cython + * self.suspended_at_unhandled = False + * self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' # <<<<<<<<<<<<<< + * self.top_level_thread_tracer_no_back_frames = [] + * self.top_level_thread_tracer_unhandled = None + */ + __Pyx_INCREF(__pyx_n_s_trace); + __Pyx_GIVEREF(__pyx_n_s_trace); + __Pyx_GOTREF(__pyx_v_self->trace_suspend_type); + __Pyx_DECREF(__pyx_v_self->trace_suspend_type); + __pyx_v_self->trace_suspend_type = __pyx_n_s_trace; + + /* "_pydevd_bundle/pydevd_cython.pyx":92 + * self.suspended_at_unhandled = False + * self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' + * self.top_level_thread_tracer_no_back_frames = [] # <<<<<<<<<<<<<< + * self.top_level_thread_tracer_unhandled = None + * self.thread_tracer = None + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __Pyx_DECREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __pyx_v_self->top_level_thread_tracer_no_back_frames = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":93 + * self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' + * self.top_level_thread_tracer_no_back_frames = [] + * self.top_level_thread_tracer_unhandled = None # <<<<<<<<<<<<<< + * self.thread_tracer = None + * self.step_in_initial_location = None + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __Pyx_DECREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __pyx_v_self->top_level_thread_tracer_unhandled = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":94 + * self.top_level_thread_tracer_no_back_frames = [] + * self.top_level_thread_tracer_unhandled = None + * self.thread_tracer = None # <<<<<<<<<<<<<< + * self.step_in_initial_location = None + * self.pydev_smart_parent_offset = -1 + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->thread_tracer); + __Pyx_DECREF(__pyx_v_self->thread_tracer); + __pyx_v_self->thread_tracer = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":95 + * self.top_level_thread_tracer_unhandled = None + * self.thread_tracer = None + * self.step_in_initial_location = None # <<<<<<<<<<<<<< + * self.pydev_smart_parent_offset = -1 + * self.pydev_smart_child_offset = -1 + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->step_in_initial_location); + __Pyx_DECREF(__pyx_v_self->step_in_initial_location); + __pyx_v_self->step_in_initial_location = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":96 + * self.thread_tracer = None + * self.step_in_initial_location = None + * self.pydev_smart_parent_offset = -1 # <<<<<<<<<<<<<< + * self.pydev_smart_child_offset = -1 + * self.pydev_smart_step_into_variants = () + */ + __pyx_v_self->pydev_smart_parent_offset = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":97 + * self.step_in_initial_location = None + * self.pydev_smart_parent_offset = -1 + * self.pydev_smart_child_offset = -1 # <<<<<<<<<<<<<< + * self.pydev_smart_step_into_variants = () + * self.target_id_to_smart_step_into_variant = {} + */ + __pyx_v_self->pydev_smart_child_offset = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":98 + * self.pydev_smart_parent_offset = -1 + * self.pydev_smart_child_offset = -1 + * self.pydev_smart_step_into_variants = () # <<<<<<<<<<<<<< + * self.target_id_to_smart_step_into_variant = {} + * + */ + 
__Pyx_INCREF(__pyx_empty_tuple); + __Pyx_GIVEREF(__pyx_empty_tuple); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_into_variants); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_into_variants); + __pyx_v_self->pydev_smart_step_into_variants = __pyx_empty_tuple; + + /* "_pydevd_bundle/pydevd_cython.pyx":99 + * self.pydev_smart_child_offset = -1 + * self.pydev_smart_step_into_variants = () + * self.target_id_to_smart_step_into_variant = {} # <<<<<<<<<<<<<< + * + * # Flag to indicate ipython use-case where each line will be executed as a call/line/return + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __Pyx_DECREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __pyx_v_self->target_id_to_smart_step_into_variant = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":111 + * # + * # See: https://github.com/microsoft/debugpy/issues/869#issuecomment-1132141003 + * self.pydev_use_scoped_step_frame = False # <<<<<<<<<<<<<< + * + * def get_topmost_frame(self, thread): + */ + __pyx_v_self->pydev_use_scoped_step_frame = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":67 + * # ENDIF + * + * def __init__(self): # <<<<<<<<<<<<<< + * self.pydev_state = STATE_RUN # STATE_RUN or STATE_SUSPEND + * self.pydev_step_stop = None + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":113 + * self.pydev_use_scoped_step_frame = False + * + * def get_topmost_frame(self, thread): # <<<<<<<<<<<<<< + * ''' + * Gets the topmost frame for the given thread. Note that it may be None + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_3get_topmost_frame(PyObject *__pyx_v_self, PyObject *__pyx_v_thread); /*proto*/ +static char __pyx_doc_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2get_topmost_frame[] = "\n Gets the topmost frame for the given thread. 
Note that it may be None\n and callers should remove the reference to the frame as soon as possible\n to avoid disturbing user code.\n "; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_3get_topmost_frame(PyObject *__pyx_v_self, PyObject *__pyx_v_thread) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_topmost_frame (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2get_topmost_frame(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_thread)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2get_topmost_frame(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_thread) { + PyObject *__pyx_v_current_frames = NULL; + PyObject *__pyx_v_topmost_frame = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + PyObject *__pyx_t_10 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_topmost_frame", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":120 + * ''' + * # sys._current_frames(): dictionary with thread id -> topmost frame + * current_frames = _current_frames() # <<<<<<<<<<<<<< + * topmost_frame = current_frames.get(thread.ident) + * if topmost_frame is None: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_current_frames); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_current_frames = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":121 + * # sys._current_frames(): dictionary with thread id -> topmost frame + * current_frames = _current_frames() + * topmost_frame = current_frames.get(thread.ident) # <<<<<<<<<<<<<< + * if topmost_frame is None: + * # Note: this is expected for dummy threads (so, getting the topmost frame should be + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_current_frames, __pyx_n_s_get); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread, __pyx_n_s_ident); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_topmost_frame = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":122 + * current_frames = _current_frames() + * topmost_frame = current_frames.get(thread.ident) + * if topmost_frame is None: # <<<<<<<<<<<<<< + * # Note: this is expected for dummy threads (so, getting the topmost frame should be + * # treated as optional). + */ + __pyx_t_5 = (__pyx_v_topmost_frame == Py_None); + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "_pydevd_bundle/pydevd_cython.pyx":125 + * # Note: this is expected for dummy threads (so, getting the topmost frame should be + * # treated as optional). 
+ * pydev_log.info( # <<<<<<<<<<<<<< + * 'Unable to get topmost frame for thread: %s, thread.ident: %s, id(thread): %s\nCurrent frames: %s.\n' + * 'GEVENT_SUPPORT: %s', + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_info); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":129 + * 'GEVENT_SUPPORT: %s', + * thread, + * thread.ident, # <<<<<<<<<<<<<< + * id(thread), + * current_frames, + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread, __pyx_n_s_ident); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + + /* "_pydevd_bundle/pydevd_cython.pyx":130 + * thread, + * thread.ident, + * id(thread), # <<<<<<<<<<<<<< + * current_frames, + * SUPPORT_GEVENT, + */ + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, __pyx_v_thread); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + + /* "_pydevd_bundle/pydevd_cython.pyx":132 + * id(thread), + * current_frames, + * SUPPORT_GEVENT, # <<<<<<<<<<<<<< + * ) + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_SUPPORT_GEVENT); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = NULL; + __pyx_t_9 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_9 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[7] = {__pyx_t_8, __pyx_kp_s_Unable_to_get_topmost_frame_for, __pyx_v_thread, __pyx_t_2, __pyx_t_4, __pyx_v_current_frames, __pyx_t_7}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_9, 6+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[7] = {__pyx_t_8, __pyx_kp_s_Unable_to_get_topmost_frame_for, __pyx_v_thread, __pyx_t_2, __pyx_t_4, __pyx_v_current_frames, __pyx_t_7}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_9, 6+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_10 = PyTuple_New(6+__pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_kp_s_Unable_to_get_topmost_frame_for); + __Pyx_GIVEREF(__pyx_kp_s_Unable_to_get_topmost_frame_for); + PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_9, __pyx_kp_s_Unable_to_get_topmost_frame_for); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_9, 
__pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_10, 2+__pyx_t_9, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_10, 3+__pyx_t_9, __pyx_t_4); + __Pyx_INCREF(__pyx_v_current_frames); + __Pyx_GIVEREF(__pyx_v_current_frames); + PyTuple_SET_ITEM(__pyx_t_10, 4+__pyx_t_9, __pyx_v_current_frames); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_10, 5+__pyx_t_9, __pyx_t_7); + __pyx_t_2 = 0; + __pyx_t_4 = 0; + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":122 + * current_frames = _current_frames() + * topmost_frame = current_frames.get(thread.ident) + * if topmost_frame is None: # <<<<<<<<<<<<<< + * # Note: this is expected for dummy threads (so, getting the topmost frame should be + * # treated as optional). + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":135 + * ) + * + * return topmost_frame # <<<<<<<<<<<<<< + * + * def __str__(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_topmost_frame); + __pyx_r = __pyx_v_topmost_frame; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":113 + * self.pydev_use_scoped_step_frame = False + * + * def get_topmost_frame(self, thread): # <<<<<<<<<<<<<< + * ''' + * Gets the topmost frame for the given thread. Note that it may be None + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.get_topmost_frame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_current_frames); + __Pyx_XDECREF(__pyx_v_topmost_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":137 + * return topmost_frame + * + * def __str__(self): # <<<<<<<<<<<<<< + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_5__str__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_5__str__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__str__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_4__str__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_4__str__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__str__", 0); + + /* 
"_pydevd_bundle/pydevd_cython.pyx":138 + * + * def __str__(self): + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( # <<<<<<<<<<<<<< + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + * + */ + __Pyx_XDECREF(__pyx_r); + + /* "_pydevd_bundle/pydevd_cython.pyx":139 + * def __str__(self): + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_state); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_step_cmd); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_notify_kill); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); + __Pyx_INCREF(__pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_v_self->pydev_step_stop); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":138 + * + * def __str__(self): + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( # <<<<<<<<<<<<<< + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + * + */ + __pyx_t_3 = __Pyx_PyString_Format(__pyx_kp_s_State_s_Stop_s_Cmd_s_Kill_s, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":137 + * return topmost_frame + * + * def __str__(self): # <<<<<<<<<<<<<< + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":2 + * cdef class PyDBAdditionalThreadInfo: + * cdef public int pydev_state # <<<<<<<<<<<<<< + * cdef public object pydev_step_stop # Actually, it's a frame or None + * cdef public int pydev_original_step_cmd + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* 
function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_state); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_state.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 2, __pyx_L1_error) + __pyx_v_self->pydev_state = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_state.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":3 + * cdef class PyDBAdditionalThreadInfo: + * cdef public int pydev_state + * cdef public object pydev_step_stop # Actually, it's a frame or None # <<<<<<<<<<<<<< + * cdef public int pydev_original_step_cmd + * cdef public int pydev_step_cmd + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop___get__(((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_step_stop); + __pyx_r = __pyx_v_self->pydev_step_stop; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_step_stop); + __pyx_v_self->pydev_step_stop = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_step_stop); + __pyx_v_self->pydev_step_stop = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":4 + * cdef public int pydev_state + * cdef public object pydev_step_stop # Actually, it's a frame or None + * cdef public int 
pydev_original_step_cmd # <<<<<<<<<<<<<< + * cdef public int pydev_step_cmd + * cdef public bint pydev_notify_kill + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_original_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_original_step_cmd.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 4, __pyx_L1_error) + __pyx_v_self->pydev_original_step_cmd = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_original_step_cmd.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":5 + * cdef public object pydev_step_stop # Actually, it's a frame or None + * cdef public int pydev_original_step_cmd + * cdef public int pydev_step_cmd # <<<<<<<<<<<<<< + * cdef public bint pydev_notify_kill + * cdef public object pydev_smart_step_stop # Actually, it's a frame or None + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_step_cmd.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 5, __pyx_L1_error) + __pyx_v_self->pydev_step_cmd = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + 
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_step_cmd.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":6 + * cdef public int pydev_original_step_cmd + * cdef public int pydev_step_cmd + * cdef public bint pydev_notify_kill # <<<<<<<<<<<<<< + * cdef public object pydev_smart_step_stop # Actually, it's a frame or None + * cdef public bint pydev_django_resolve_frame + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_notify_kill); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_notify_kill.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) 
__PYX_ERR(1, 6, __pyx_L1_error) + __pyx_v_self->pydev_notify_kill = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_notify_kill.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":7 + * cdef public int pydev_step_cmd + * cdef public bint pydev_notify_kill + * cdef public object pydev_smart_step_stop # Actually, it's a frame or None # <<<<<<<<<<<<<< + * cdef public bint pydev_django_resolve_frame + * cdef public object pydev_call_from_jinja2 + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_r = __pyx_v_self->pydev_smart_step_stop; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_v_self->pydev_smart_step_stop = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_v_self->pydev_smart_step_stop = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":8 + * cdef public bint pydev_notify_kill + * cdef public object pydev_smart_step_stop # Actually, it's a frame or None + * cdef public bint pydev_django_resolve_frame # <<<<<<<<<<<<<< + * cdef public object pydev_call_from_jinja2 + * cdef public object pydev_call_inside_jinja2 + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_django_resolve_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_django_resolve_frame.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); 
/*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 8, __pyx_L1_error) + __pyx_v_self->pydev_django_resolve_frame = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_django_resolve_frame.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":9 + * cdef public object pydev_smart_step_stop # Actually, it's a frame or None + * cdef public bint pydev_django_resolve_frame + * cdef public object pydev_call_from_jinja2 # <<<<<<<<<<<<<< + * cdef public object pydev_call_inside_jinja2 + * cdef public int is_tracing + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_r = __pyx_v_self->pydev_call_from_jinja2; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int 
__pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_v_self->pydev_call_from_jinja2 = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_v_self->pydev_call_from_jinja2 = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":10 + * cdef public bint pydev_django_resolve_frame + * cdef public object pydev_call_from_jinja2 + * cdef public object pydev_call_inside_jinja2 # <<<<<<<<<<<<<< + * cdef public int is_tracing + * cdef public tuple conditional_breakpoint_exception + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2___get__(struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_r = __pyx_v_self->pydev_call_inside_jinja2; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_v_self->pydev_call_inside_jinja2 = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_v_self->pydev_call_inside_jinja2 = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":11 + * cdef public object pydev_call_from_jinja2 + * cdef public object pydev_call_inside_jinja2 + * cdef public int is_tracing # <<<<<<<<<<<<<< + * cdef public tuple conditional_breakpoint_exception + * cdef public str pydev_message + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->is_tracing); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.is_tracing.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 11, __pyx_L1_error) + __pyx_v_self->is_tracing = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.is_tracing.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":12 + * cdef public object pydev_call_inside_jinja2 + * cdef public int is_tracing + * cdef public tuple conditional_breakpoint_exception # <<<<<<<<<<<<<< + * cdef public str pydev_message + * cdef public int suspend_type + */ + +/* 
Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_r = __pyx_v_self->conditional_breakpoint_exception; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_v_self->conditional_breakpoint_exception = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.conditional_breakpoint_exception.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_v_self->conditional_breakpoint_exception = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":13 + * cdef public int is_tracing + * cdef public tuple conditional_breakpoint_exception + * cdef public str pydev_message # <<<<<<<<<<<<<< + * cdef public int suspend_type + * cdef public int pydev_next_line + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_message); + __pyx_r = __pyx_v_self->pydev_message; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* 
function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->pydev_message); + __Pyx_DECREF(__pyx_v_self->pydev_message); + __pyx_v_self->pydev_message = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_message.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_message); + __Pyx_DECREF(__pyx_v_self->pydev_message); + __pyx_v_self->pydev_message = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":14 + * cdef public tuple conditional_breakpoint_exception + * cdef public str pydev_message + * cdef public int suspend_type # <<<<<<<<<<<<<< + * cdef public int pydev_next_line + * cdef public str pydev_func_name + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->suspend_type); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.suspend_type.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 14, __pyx_L1_error) + __pyx_v_self->suspend_type = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.suspend_type.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":15 + * cdef public str pydev_message + * cdef public int suspend_type + * cdef public int pydev_next_line # <<<<<<<<<<<<<< + * cdef public str pydev_func_name + * cdef public bint suspended_at_unhandled + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_next_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_next_line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 15, __pyx_L1_error) + __pyx_v_self->pydev_next_line = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_next_line.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":16 + * cdef public int suspend_type + * cdef public int pydev_next_line + * cdef public str pydev_func_name # <<<<<<<<<<<<<< + * cdef public bint suspended_at_unhandled + * cdef public str trace_suspend_type + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name___get__(((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_func_name); + __pyx_r = __pyx_v_self->pydev_func_name; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(1, 16, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->pydev_func_name); + __Pyx_DECREF(__pyx_v_self->pydev_func_name); + __pyx_v_self->pydev_func_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_func_name.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_4__del__(struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_func_name); + __Pyx_DECREF(__pyx_v_self->pydev_func_name); + __pyx_v_self->pydev_func_name = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":17 + * cdef public int pydev_next_line + * cdef public str pydev_func_name + * cdef public bint suspended_at_unhandled # <<<<<<<<<<<<<< + * cdef public str trace_suspend_type + * cdef public object top_level_thread_tracer_no_back_frames + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->suspended_at_unhandled); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.suspended_at_unhandled.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + 
__Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_v_self->suspended_at_unhandled = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.suspended_at_unhandled.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":18 + * cdef public str pydev_func_name + * cdef public bint suspended_at_unhandled + * cdef public str trace_suspend_type # <<<<<<<<<<<<<< + * cdef public object top_level_thread_tracer_no_back_frames + * cdef public object top_level_thread_tracer_unhandled + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->trace_suspend_type); + __pyx_r = __pyx_v_self->trace_suspend_type; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) 
== Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(1, 18, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->trace_suspend_type); + __Pyx_DECREF(__pyx_v_self->trace_suspend_type); + __pyx_v_self->trace_suspend_type = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.trace_suspend_type.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->trace_suspend_type); + __Pyx_DECREF(__pyx_v_self->trace_suspend_type); + __pyx_v_self->trace_suspend_type = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":19 + * cdef public bint suspended_at_unhandled + * cdef public str trace_suspend_type + * cdef public object top_level_thread_tracer_no_back_frames # <<<<<<<<<<<<<< + * cdef public object top_level_thread_tracer_unhandled + * cdef public object thread_tracer + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + 
__Pyx_INCREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __pyx_r = __pyx_v_self->top_level_thread_tracer_no_back_frames; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __Pyx_DECREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __pyx_v_self->top_level_thread_tracer_no_back_frames = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __Pyx_DECREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __pyx_v_self->top_level_thread_tracer_no_back_frames = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":20 + * cdef public str trace_suspend_type + * cdef public object top_level_thread_tracer_no_back_frames + * cdef public object top_level_thread_tracer_unhandled # <<<<<<<<<<<<<< + * cdef public object thread_tracer + * cdef public object step_in_initial_location + */ + +/* 
Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __pyx_r = __pyx_v_self->top_level_thread_tracer_unhandled; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __Pyx_DECREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __pyx_v_self->top_level_thread_tracer_unhandled = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function 
exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __Pyx_DECREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __pyx_v_self->top_level_thread_tracer_unhandled = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":21 + * cdef public object top_level_thread_tracer_no_back_frames + * cdef public object top_level_thread_tracer_unhandled + * cdef public object thread_tracer # <<<<<<<<<<<<<< + * cdef public object step_in_initial_location + * cdef public int pydev_smart_parent_offset + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->thread_tracer); + __pyx_r = __pyx_v_self->thread_tracer; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->thread_tracer); + __Pyx_DECREF(__pyx_v_self->thread_tracer); + __pyx_v_self->thread_tracer = 
__pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->thread_tracer); + __Pyx_DECREF(__pyx_v_self->thread_tracer); + __pyx_v_self->thread_tracer = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":22 + * cdef public object top_level_thread_tracer_unhandled + * cdef public object thread_tracer + * cdef public object step_in_initial_location # <<<<<<<<<<<<<< + * cdef public int pydev_smart_parent_offset + * cdef public int pydev_smart_child_offset + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->step_in_initial_location); + __pyx_r = __pyx_v_self->step_in_initial_location; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->step_in_initial_location); + __Pyx_DECREF(__pyx_v_self->step_in_initial_location); + __pyx_v_self->step_in_initial_location = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->step_in_initial_location); + __Pyx_DECREF(__pyx_v_self->step_in_initial_location); + __pyx_v_self->step_in_initial_location = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":23 + * cdef public object thread_tracer + * cdef public object step_in_initial_location + * cdef public int pydev_smart_parent_offset # <<<<<<<<<<<<<< + * cdef public int pydev_smart_child_offset + * cdef public tuple pydev_smart_step_into_variants + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject 
*__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_smart_parent_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_smart_parent_offset.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 23, __pyx_L1_error) + __pyx_v_self->pydev_smart_parent_offset = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_smart_parent_offset.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":24 + * cdef public object step_in_initial_location + * cdef public int pydev_smart_parent_offset + * cdef public int pydev_smart_child_offset # <<<<<<<<<<<<<< + * cdef public tuple pydev_smart_step_into_variants + * cdef public dict target_id_to_smart_step_into_variant + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_smart_child_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_smart_child_offset.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 24, __pyx_L1_error) + __pyx_v_self->pydev_smart_child_offset = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_smart_child_offset.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":25 + * cdef public int pydev_smart_parent_offset + * cdef public int pydev_smart_child_offset + * cdef public tuple pydev_smart_step_into_variants # <<<<<<<<<<<<<< + * cdef public dict target_id_to_smart_step_into_variant + * cdef public bint pydev_use_scoped_step_frame + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_smart_step_into_variants); + __pyx_r = __pyx_v_self->pydev_smart_step_into_variants; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(1, 25, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_into_variants); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_into_variants); + __pyx_v_self->pydev_smart_step_into_variants = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_smart_step_into_variants.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_into_variants); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_into_variants); + __pyx_v_self->pydev_smart_step_into_variants = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":26 + * cdef public int pydev_smart_child_offset + * cdef public tuple pydev_smart_step_into_variants + * cdef public dict target_id_to_smart_step_into_variant # <<<<<<<<<<<<<< + * cdef public bint pydev_use_scoped_step_frame + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __pyx_r = __pyx_v_self->target_id_to_smart_step_into_variant; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyDict_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(1, 26, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __Pyx_DECREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __pyx_v_self->target_id_to_smart_step_into_variant = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.target_id_to_smart_step_into_variant.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __Pyx_DECREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __pyx_v_self->target_id_to_smart_step_into_variant = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pxd":27 + * cdef public tuple pydev_smart_step_into_variants + * cdef public dict target_id_to_smart_step_into_variant + * cdef public bint pydev_use_scoped_step_frame # <<<<<<<<<<<<<< + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame___get__(((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_use_scoped_step_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_use_scoped_step_frame.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 27, __pyx_L1_error) + __pyx_v_self->pydev_use_scoped_step_frame = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_use_scoped_step_frame.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 
0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_6__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_6__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + int __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_original_step_cmd, self.pydev_smart_child_offset, self.pydev_smart_parent_offset, self.pydev_smart_step_into_variants, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.pydev_use_scoped_step_frame, self.step_in_initial_location, self.suspend_type, self.suspended_at_unhandled, self.target_id_to_smart_step_into_variant, self.thread_tracer, self.top_level_thread_tracer_no_back_frames, self.top_level_thread_tracer_unhandled, self.trace_suspend_type) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->is_tracing); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_django_resolve_frame); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_next_line); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_notify_kill); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_original_step_cmd); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_smart_child_offset); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_smart_parent_offset); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_state); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_step_cmd); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = 
__Pyx_PyBool_FromLong(__pyx_v_self->pydev_use_scoped_step_frame); if (unlikely(!__pyx_t_10)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = __Pyx_PyInt_From_int(__pyx_v_self->suspend_type); if (unlikely(!__pyx_t_11)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyBool_FromLong(__pyx_v_self->suspended_at_unhandled); if (unlikely(!__pyx_t_12)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = PyTuple_New(26); if (unlikely(!__pyx_t_13)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_INCREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_GIVEREF(__pyx_v_self->conditional_breakpoint_exception); + PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_v_self->conditional_breakpoint_exception); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_13, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_GIVEREF(__pyx_v_self->pydev_call_from_jinja2); + PyTuple_SET_ITEM(__pyx_t_13, 2, __pyx_v_self->pydev_call_from_jinja2); + __Pyx_INCREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_GIVEREF(__pyx_v_self->pydev_call_inside_jinja2); + PyTuple_SET_ITEM(__pyx_t_13, 3, __pyx_v_self->pydev_call_inside_jinja2); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_13, 4, __pyx_t_2); + __Pyx_INCREF(__pyx_v_self->pydev_func_name); + __Pyx_GIVEREF(__pyx_v_self->pydev_func_name); + PyTuple_SET_ITEM(__pyx_t_13, 5, __pyx_v_self->pydev_func_name); + __Pyx_INCREF(__pyx_v_self->pydev_message); + __Pyx_GIVEREF(__pyx_v_self->pydev_message); + PyTuple_SET_ITEM(__pyx_t_13, 6, __pyx_v_self->pydev_message); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_13, 7, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_13, 8, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_13, 9, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_13, 10, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_13, 11, __pyx_t_7); + __Pyx_INCREF(__pyx_v_self->pydev_smart_step_into_variants); + __Pyx_GIVEREF(__pyx_v_self->pydev_smart_step_into_variants); + PyTuple_SET_ITEM(__pyx_t_13, 12, __pyx_v_self->pydev_smart_step_into_variants); + __Pyx_INCREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_GIVEREF(__pyx_v_self->pydev_smart_step_stop); + PyTuple_SET_ITEM(__pyx_t_13, 13, __pyx_v_self->pydev_smart_step_stop); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_13, 14, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_13, 15, __pyx_t_9); + __Pyx_INCREF(__pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_v_self->pydev_step_stop); + PyTuple_SET_ITEM(__pyx_t_13, 16, __pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_t_10); + PyTuple_SET_ITEM(__pyx_t_13, 17, __pyx_t_10); + __Pyx_INCREF(__pyx_v_self->step_in_initial_location); + __Pyx_GIVEREF(__pyx_v_self->step_in_initial_location); + PyTuple_SET_ITEM(__pyx_t_13, 18, __pyx_v_self->step_in_initial_location); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_13, 19, __pyx_t_11); + __Pyx_GIVEREF(__pyx_t_12); + PyTuple_SET_ITEM(__pyx_t_13, 20, __pyx_t_12); + __Pyx_INCREF(__pyx_v_self->target_id_to_smart_step_into_variant); + __Pyx_GIVEREF(__pyx_v_self->target_id_to_smart_step_into_variant); + PyTuple_SET_ITEM(__pyx_t_13, 21, __pyx_v_self->target_id_to_smart_step_into_variant); + __Pyx_INCREF(__pyx_v_self->thread_tracer); + __Pyx_GIVEREF(__pyx_v_self->thread_tracer); + PyTuple_SET_ITEM(__pyx_t_13, 22, __pyx_v_self->thread_tracer); + 
__Pyx_INCREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + __Pyx_GIVEREF(__pyx_v_self->top_level_thread_tracer_no_back_frames); + PyTuple_SET_ITEM(__pyx_t_13, 23, __pyx_v_self->top_level_thread_tracer_no_back_frames); + __Pyx_INCREF(__pyx_v_self->top_level_thread_tracer_unhandled); + __Pyx_GIVEREF(__pyx_v_self->top_level_thread_tracer_unhandled); + PyTuple_SET_ITEM(__pyx_t_13, 24, __pyx_v_self->top_level_thread_tracer_unhandled); + __Pyx_INCREF(__pyx_v_self->trace_suspend_type); + __Pyx_GIVEREF(__pyx_v_self->trace_suspend_type); + PyTuple_SET_ITEM(__pyx_t_13, 25, __pyx_v_self->trace_suspend_type); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_3 = 0; + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_t_6 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_9 = 0; + __pyx_t_10 = 0; + __pyx_t_11 = 0; + __pyx_t_12 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_13); + __pyx_t_13 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_original_step_cmd, self.pydev_smart_child_offset, self.pydev_smart_parent_offset, self.pydev_smart_step_into_variants, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.pydev_use_scoped_step_frame, self.step_in_initial_location, self.suspend_type, self.suspended_at_unhandled, self.target_id_to_smart_step_into_variant, self.thread_tracer, self.top_level_thread_tracer_no_back_frames, self.top_level_thread_tracer_unhandled, self.trace_suspend_type) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_13 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_13)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __pyx_v__dict = __pyx_t_13; + __pyx_t_13 = 0; + + /* "(tree fragment)":7 + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_original_step_cmd, self.pydev_smart_child_offset, self.pydev_smart_parent_offset, self.pydev_smart_step_into_variants, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.pydev_use_scoped_step_frame, self.step_in_initial_location, self.suspend_type, self.suspended_at_unhandled, self.target_id_to_smart_step_into_variant, self.thread_tracer, self.top_level_thread_tracer_no_back_frames, self.top_level_thread_tracer_unhandled, self.trace_suspend_type) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_14 = (__pyx_v__dict != Py_None); + __pyx_t_15 = (__pyx_t_14 != 0); + if (__pyx_t_15) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_13 = PyTuple_New(1); if (unlikely(!__pyx_t_13)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_v__dict); + __pyx_t_12 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_13); if (unlikely(!__pyx_t_12)) __PYX_ERR(2, 8, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_12)); + __pyx_t_12 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_into_variants is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None or self.step_in_initial_location is not None or self.target_id_to_smart_step_into_variant is not None or self.thread_tracer is not None or self.top_level_thread_tracer_no_back_frames is not None or self.top_level_thread_tracer_unhandled is not None or self.trace_suspend_type is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_original_step_cmd, self.pydev_smart_child_offset, self.pydev_smart_parent_offset, self.pydev_smart_step_into_variants, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.pydev_use_scoped_step_frame, self.step_in_initial_location, self.suspend_type, self.suspended_at_unhandled, self.target_id_to_smart_step_into_variant, self.thread_tracer, self.top_level_thread_tracer_no_back_frames, self.top_level_thread_tracer_unhandled, self.trace_suspend_type) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_into_variants is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None or self.step_in_initial_location is not None or self.target_id_to_smart_step_into_variant is not None or self.thread_tracer is not None or self.top_level_thread_tracer_no_back_frames is not None or self.top_level_thread_tracer_unhandled is not None or self.trace_suspend_type is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, None), state + */ + /*else*/ { + __pyx_t_14 = (__pyx_v_self->conditional_breakpoint_exception != ((PyObject*)Py_None)); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->pydev_call_from_jinja2 != Py_None); + __pyx_t_14 = (__pyx_t_16 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_15 = __pyx_t_14; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_self->pydev_call_inside_jinja2 != Py_None); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->pydev_func_name != ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_16 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_15 = __pyx_t_14; + goto 
__pyx_L4_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_self->pydev_message != ((PyObject*)Py_None)); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->pydev_smart_step_into_variants != ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_16 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_15 = __pyx_t_14; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_self->pydev_smart_step_stop != Py_None); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->pydev_step_stop != Py_None); + __pyx_t_14 = (__pyx_t_16 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_15 = __pyx_t_14; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_self->step_in_initial_location != Py_None); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->target_id_to_smart_step_into_variant != ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_16 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_15 = __pyx_t_14; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_self->thread_tracer != Py_None); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->top_level_thread_tracer_no_back_frames != Py_None); + __pyx_t_14 = (__pyx_t_16 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_15 = __pyx_t_14; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_self->top_level_thread_tracer_unhandled != Py_None); + __pyx_t_16 = (__pyx_t_14 != 0); + if (!__pyx_t_16) { + } else { + __pyx_t_15 = __pyx_t_16; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_16 = (__pyx_v_self->trace_suspend_type != ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_16 != 0); + __pyx_t_15 = __pyx_t_14; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_15; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_into_variants is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None or self.step_in_initial_location is not None or self.target_id_to_smart_step_into_variant is not None or self.thread_tracer is not None or self.top_level_thread_tracer_no_back_frames is not None or self.top_level_thread_tracer_unhandled is not None or self.trace_suspend_type is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, None), state + * else: + */ + __pyx_t_15 = (__pyx_v_use_setstate != 0); + if (__pyx_t_15) { + + /* "(tree fragment)":13 + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_into_variants is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None or self.step_in_initial_location is not None or self.target_id_to_smart_step_into_variant is not None or self.thread_tracer is not None or self.top_level_thread_tracer_no_back_frames is not None or 
self.top_level_thread_tracer_unhandled is not None or self.trace_suspend_type is not None + * if use_setstate: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_12, __pyx_n_s_pyx_unpickle_PyDBAdditionalThr); if (unlikely(!__pyx_t_12)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = PyTuple_New(3); if (unlikely(!__pyx_t_13)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_13, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_123419394); + __Pyx_GIVEREF(__pyx_int_123419394); + PyTuple_SET_ITEM(__pyx_t_13, 1, __pyx_int_123419394); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_13, 2, Py_None); + __pyx_t_11 = PyTuple_New(3); if (unlikely(!__pyx_t_11)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_12); + PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_12); + __Pyx_GIVEREF(__pyx_t_13); + PyTuple_SET_ITEM(__pyx_t_11, 1, __pyx_t_13); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_11, 2, __pyx_v_state); + __pyx_t_12 = 0; + __pyx_t_13 = 0; + __pyx_r = __pyx_t_11; + __pyx_t_11 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_into_variants is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None or self.step_in_initial_location is not None or self.target_id_to_smart_step_into_variant is not None or self.thread_tracer is not None or self.top_level_thread_tracer_no_back_frames is not None or self.top_level_thread_tracer_unhandled is not None or self.trace_suspend_type is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, None), state + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_11, __pyx_n_s_pyx_unpickle_PyDBAdditionalThr); if (unlikely(!__pyx_t_11)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_13 = PyTuple_New(3); if (unlikely(!__pyx_t_13)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_13, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_123419394); + __Pyx_GIVEREF(__pyx_int_123419394); + PyTuple_SET_ITEM(__pyx_t_13, 1, __pyx_int_123419394); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_13, 2, __pyx_v_state); + __pyx_t_12 = 
PyTuple_New(2); if (unlikely(!__pyx_t_12)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_11); + __Pyx_GIVEREF(__pyx_t_13); + PyTuple_SET_ITEM(__pyx_t_12, 1, __pyx_t_13); + __pyx_t_11 = 0; + __pyx_t_13 = 0; + __pyx_r = __pyx_t_12; + __pyx_t_12 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_8__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_8__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: 
+ * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0x75b3b02, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":145 + * + * + * def set_additional_thread_info(thread): # <<<<<<<<<<<<<< + * try: + * additional_info = thread.additional_info + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_1set_additional_thread_info(PyObject *__pyx_self, PyObject *__pyx_v_thread); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_1set_additional_thread_info = {"set_additional_thread_info", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_1set_additional_thread_info, METH_O, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_1set_additional_thread_info(PyObject *__pyx_self, PyObject *__pyx_v_thread) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("set_additional_thread_info (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_set_additional_thread_info(__pyx_self, ((PyObject *)__pyx_v_thread)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_set_additional_thread_info(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_thread) { + PyObject *__pyx_v_additional_info = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("set_additional_thread_info", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":146 + * + * def set_additional_thread_info(thread): + * try: # <<<<<<<<<<<<<< + * additional_info = thread.additional_info + * if additional_info is None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":147 + * def set_additional_thread_info(thread): + * try: + * additional_info = thread.additional_info # <<<<<<<<<<<<<< + * if additional_info is None: + * raise AttributeError() + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread, __pyx_n_s_additional_info); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 147, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_additional_info = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":148 + * try: + * additional_info = thread.additional_info + * if additional_info is 
None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + __pyx_t_5 = (__pyx_v_additional_info == Py_None); + __pyx_t_6 = (__pyx_t_5 != 0); + if (unlikely(__pyx_t_6)) { + + /* "_pydevd_bundle/pydevd_cython.pyx":149 + * additional_info = thread.additional_info + * if additional_info is None: + * raise AttributeError() # <<<<<<<<<<<<<< + * except: + * with _set_additional_thread_info_lock: + */ + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_builtin_AttributeError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 149, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 149, __pyx_L3_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":148 + * try: + * additional_info = thread.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":146 + * + * def set_additional_thread_info(thread): + * try: # <<<<<<<<<<<<<< + * additional_info = thread.additional_info + * if additional_info is None: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":150 + * if additional_info is None: + * raise AttributeError() + * except: # <<<<<<<<<<<<<< + * with _set_additional_thread_info_lock: + * # If it's not there, set it within a lock to avoid any racing + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.set_additional_thread_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 150, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_8); + + /* "_pydevd_bundle/pydevd_cython.pyx":151 + * raise AttributeError() + * except: + * with _set_additional_thread_info_lock: # <<<<<<<<<<<<<< + * # If it's not there, set it within a lock to avoid any racing + * # conditions. + */ + /*with:*/ { + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_set_additional_thread_info_lock); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 151, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __Pyx_PyObject_LookupSpecial(__pyx_t_9, __pyx_n_s_exit); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 151, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_12 = __Pyx_PyObject_LookupSpecial(__pyx_t_9, __pyx_n_s_enter); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 151, __pyx_L12_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + __pyx_t_11 = (__pyx_t_13) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_13) : __Pyx_PyObject_CallNoArg(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 151, __pyx_L12_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + /*try:*/ { + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":154 + * # If it's not there, set it within a lock to avoid any racing + * # conditions. + * additional_info = getattr(thread, 'additional_info', None) # <<<<<<<<<<<<<< + * if additional_info is None: + * additional_info = PyDBAdditionalThreadInfo() + */ + __pyx_t_9 = __Pyx_GetAttr3(__pyx_v_thread, __pyx_n_s_additional_info, Py_None); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 154, __pyx_L18_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_XDECREF_SET(__pyx_v_additional_info, __pyx_t_9); + __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":155 + * # conditions. + * additional_info = getattr(thread, 'additional_info', None) + * if additional_info is None: # <<<<<<<<<<<<<< + * additional_info = PyDBAdditionalThreadInfo() + * thread.additional_info = additional_info + */ + __pyx_t_6 = (__pyx_v_additional_info == Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { + + /* "_pydevd_bundle/pydevd_cython.pyx":156 + * additional_info = getattr(thread, 'additional_info', None) + * if additional_info is None: + * additional_info = PyDBAdditionalThreadInfo() # <<<<<<<<<<<<<< + * thread.additional_info = additional_info + * + */ + __pyx_t_9 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo)); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 156, __pyx_L18_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF_SET(__pyx_v_additional_info, __pyx_t_9); + __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":155 + * # conditions. + * additional_info = getattr(thread, 'additional_info', None) + * if additional_info is None: # <<<<<<<<<<<<<< + * additional_info = PyDBAdditionalThreadInfo() + * thread.additional_info = additional_info + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":157 + * if additional_info is None: + * additional_info = PyDBAdditionalThreadInfo() + * thread.additional_info = additional_info # <<<<<<<<<<<<<< + * + * return additional_info + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_thread, __pyx_n_s_additional_info, __pyx_v_additional_info) < 0) __PYX_ERR(0, 157, __pyx_L18_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":151 + * raise AttributeError() + * except: + * with _set_additional_thread_info_lock: # <<<<<<<<<<<<<< + * # If it's not there, set it within a lock to avoid any racing + * # conditions. 
+ */ + } + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + goto __pyx_L25_try_end; + __pyx_L18_error:; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.set_additional_thread_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_11, &__pyx_t_12) < 0) __PYX_ERR(0, 151, __pyx_L20_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = PyTuple_Pack(3, __pyx_t_9, __pyx_t_11, __pyx_t_12); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 151, __pyx_L20_except_error) + __Pyx_GOTREF(__pyx_t_13); + __pyx_t_17 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_13, NULL); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 151, __pyx_L20_except_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_17); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + if (__pyx_t_5 < 0) __PYX_ERR(0, 151, __pyx_L20_except_error) + __pyx_t_6 = ((!(__pyx_t_5 != 0)) != 0); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_9); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ErrRestoreWithState(__pyx_t_9, __pyx_t_11, __pyx_t_12); + __pyx_t_9 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; + __PYX_ERR(0, 151, __pyx_L20_except_error) + } + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L19_exception_handled; + } + __pyx_L20_except_error:; + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + goto __pyx_L5_except_error; + __pyx_L19_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + __pyx_L25_try_end:; + } + } + /*finally:*/ { + /*normal exit:*/{ + if (__pyx_t_10) { + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_tuple__2, NULL); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 151, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } + goto __pyx_L17; + } + __pyx_L17:; + } + goto __pyx_L30; + __pyx_L12_error:; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L5_except_error; + __pyx_L30:; + } + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L4_exception_handled; + } + __pyx_L5_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":146 + * + * def set_additional_thread_info(thread): + * try: # <<<<<<<<<<<<<< + * additional_info = thread.additional_info + * if additional_info is None: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L4_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L8_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":159 + * thread.additional_info = additional_info + * + * return additional_info # 
<<<<<<<<<<<<<< + * import linecache + * import os.path + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(!__pyx_v_additional_info)) { __Pyx_RaiseUnboundLocalError("additional_info"); __PYX_ERR(0, 159, __pyx_L1_error) } + __Pyx_INCREF(__pyx_v_additional_info); + __pyx_r = __pyx_v_additional_info; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":145 + * + * + * def set_additional_thread_info(thread): # <<<<<<<<<<<<<< + * try: + * additional_info = thread.additional_info + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.set_additional_thread_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":176 + * except ImportError: + * + * def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): # <<<<<<<<<<<<<< + * return None + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_3get_smart_step_into_variant_from_frame_offset(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_3get_smart_step_into_variant_from_frame_offset = {"get_smart_step_into_variant_from_frame_offset", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_3get_smart_step_into_variant_from_frame_offset, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_3get_smart_step_into_variant_from_frame_offset(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_args = 0; + CYTHON_UNUSED PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_smart_step_into_variant_from_frame_offset (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "get_smart_step_into_variant_from_frame_offset", 1))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_2get_smart_step_into_variant_from_frame_offset(__pyx_self, __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_2get_smart_step_into_variant_from_frame_offset(CYTHON_UNUSED PyObject *__pyx_self, CYTHON_UNUSED PyObject *__pyx_v_args, CYTHON_UNUSED PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_smart_step_into_variant_from_frame_offset", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":177 + * + * def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): + * return None # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":176 + * except ImportError: + * + * def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): # <<<<<<<<<<<<<< + * return None + * + */ + + /* function exit code */ + 
__pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":212 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef is_unhandled_exception(container_obj, py_db, frame, int last_raise_line, set raise_lines): # <<<<<<<<<<<<<< + * # ELSE + * # def is_unhandled_exception(container_obj, py_db, frame, last_raise_line, raise_lines): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_is_unhandled_exception(PyObject *__pyx_v_container_obj, PyObject *__pyx_v_py_db, PyObject *__pyx_v_frame, int __pyx_v_last_raise_line, PyObject *__pyx_v_raise_lines) { + PyObject *__pyx_v_try_except_infos = NULL; + PyObject *__pyx_v_valid_try_except_infos = NULL; + PyObject *__pyx_v_try_except_info = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + int __pyx_t_11; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("is_unhandled_exception", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":216 + * # def is_unhandled_exception(container_obj, py_db, frame, last_raise_line, raise_lines): + * # ENDIF + * if frame.f_lineno in raise_lines: # <<<<<<<<<<<<<< + * return True + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 216, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(__pyx_v_raise_lines == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 216, __pyx_L1_error) + } + __pyx_t_2 = (__Pyx_PySet_ContainsTF(__pyx_t_1, __pyx_v_raise_lines, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 216, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":217 + * # ENDIF + * if frame.f_lineno in raise_lines: + * return True # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":216 + * # def is_unhandled_exception(container_obj, py_db, frame, last_raise_line, raise_lines): + * # ENDIF + * if frame.f_lineno in raise_lines: # <<<<<<<<<<<<<< + * return True + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":220 + * + * else: + * try_except_infos = container_obj.try_except_infos # <<<<<<<<<<<<<< + * if try_except_infos is None: + * container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + */ + /*else*/ { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_container_obj, __pyx_n_s_try_except_infos); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_try_except_infos = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":221 + * else: + * try_except_infos = container_obj.try_except_infos + * if try_except_infos is None: # <<<<<<<<<<<<<< + * container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + * + */ + __pyx_t_3 = (__pyx_v_try_except_infos == Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":222 + * try_except_infos = container_obj.try_except_infos + * 
if try_except_infos is None: + * container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) # <<<<<<<<<<<<<< + * + * if not try_except_infos: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_collect_try_except_info); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_1 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_container_obj, __pyx_n_s_try_except_infos, __pyx_t_1) < 0) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_INCREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_try_except_infos, __pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":221 + * else: + * try_except_infos = container_obj.try_except_infos + * if try_except_infos is None: # <<<<<<<<<<<<<< + * container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":224 + * container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + * + * if not try_except_infos: # <<<<<<<<<<<<<< + * # Consider the last exception as unhandled because there's no try..except in it. + * return True + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_try_except_infos); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 224, __pyx_L1_error) + __pyx_t_3 = ((!__pyx_t_2) != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":226 + * if not try_except_infos: + * # Consider the last exception as unhandled because there's no try..except in it. + * return True # <<<<<<<<<<<<<< + * else: + * # Now, consider only the try..except for the raise + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":224 + * container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + * + * if not try_except_infos: # <<<<<<<<<<<<<< + * # Consider the last exception as unhandled because there's no try..except in it. 
+ * return True + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":229 + * else: + * # Now, consider only the try..except for the raise + * valid_try_except_infos = [] # <<<<<<<<<<<<<< + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_try_block(last_raise_line): + */ + /*else*/ { + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_valid_try_except_infos = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":230 + * # Now, consider only the try..except for the raise + * valid_try_except_infos = [] + * for try_except_info in try_except_infos: # <<<<<<<<<<<<<< + * if try_except_info.is_line_in_try_block(last_raise_line): + * valid_try_except_infos.append(try_except_info) + */ + if (likely(PyList_CheckExact(__pyx_v_try_except_infos)) || PyTuple_CheckExact(__pyx_v_try_except_infos)) { + __pyx_t_1 = __pyx_v_try_except_infos; __Pyx_INCREF(__pyx_t_1); __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_try_except_infos); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 230, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 230, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_4); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 230, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 230, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_4); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 230, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 230, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } + } else { + __pyx_t_4 = __pyx_t_8(__pyx_t_1); + if (unlikely(!__pyx_t_4)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 230, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_4); + } + __Pyx_XDECREF_SET(__pyx_v_try_except_info, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":231 + * valid_try_except_infos = [] + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_try_block(last_raise_line): # <<<<<<<<<<<<<< + * valid_try_except_infos.append(try_except_info) + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_try_except_info, __pyx_n_s_is_line_in_try_block); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_last_raise_line); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + 
__Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_4 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_9, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":232 + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_try_block(last_raise_line): + * valid_try_except_infos.append(try_except_info) # <<<<<<<<<<<<<< + * + * if not valid_try_except_infos: + */ + __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_valid_try_except_infos, __pyx_v_try_except_info); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 232, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":231 + * valid_try_except_infos = [] + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_try_block(last_raise_line): # <<<<<<<<<<<<<< + * valid_try_except_infos.append(try_except_info) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":230 + * # Now, consider only the try..except for the raise + * valid_try_except_infos = [] + * for try_except_info in try_except_infos: # <<<<<<<<<<<<<< + * if try_except_info.is_line_in_try_block(last_raise_line): + * valid_try_except_infos.append(try_except_info) + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":234 + * valid_try_except_infos.append(try_except_info) + * + * if not valid_try_except_infos: # <<<<<<<<<<<<<< + * return True + * + */ + __pyx_t_3 = (PyList_GET_SIZE(__pyx_v_valid_try_except_infos) != 0); + __pyx_t_2 = ((!__pyx_t_3) != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":235 + * + * if not valid_try_except_infos: + * return True # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":234 + * valid_try_except_infos.append(try_except_info) + * + * if not valid_try_except_infos: # <<<<<<<<<<<<<< + * return True + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":242 + * # where one try..except is inside the other with only a raise + * # and it's gotten in the except line. 
+ * for try_except_info in try_except_infos: # <<<<<<<<<<<<<< + * if try_except_info.is_line_in_except_block(frame.f_lineno): + * if ( + */ + /*else*/ { + if (likely(PyList_CheckExact(__pyx_v_try_except_infos)) || PyTuple_CheckExact(__pyx_v_try_except_infos)) { + __pyx_t_1 = __pyx_v_try_except_infos; __Pyx_INCREF(__pyx_t_1); __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_try_except_infos); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 242, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 242, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_4); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 242, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 242, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_4); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 242, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 242, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } + } else { + __pyx_t_4 = __pyx_t_8(__pyx_t_1); + if (unlikely(!__pyx_t_4)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 242, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_4); + } + __Pyx_XDECREF_SET(__pyx_v_try_except_info, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":243 + * # and it's gotten in the except line. + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_except_block(frame.f_lineno): # <<<<<<<<<<<<<< + * if ( + * frame.f_lineno == try_except_info.except_line or + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_try_except_info, __pyx_n_s_is_line_in_except_block); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 243, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 243, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_4 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_9, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 243, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 243, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":245 + * if try_except_info.is_line_in_except_block(frame.f_lineno): + * if ( + * frame.f_lineno == try_except_info.except_line or # <<<<<<<<<<<<<< + * frame.f_lineno in try_except_info.raise_lines_in_except + * ): + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 245, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_try_except_info, __pyx_n_s_except_line); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 245, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_4, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 245, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 245, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (!__pyx_t_3) { + } else { + __pyx_t_2 = __pyx_t_3; + goto __pyx_L14_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":246 + * if ( + * frame.f_lineno == try_except_info.except_line or + * frame.f_lineno in try_except_info.raise_lines_in_except # <<<<<<<<<<<<<< + * ): + * # In a raise inside a try..except block or some except which doesn't + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 246, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_try_except_info, __pyx_n_s_raise_lines_in_except); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 246, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = (__Pyx_PySequence_ContainsTF(__pyx_t_6, __pyx_t_5, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 246, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_11 = (__pyx_t_3 != 0); + __pyx_t_2 = __pyx_t_11; + __pyx_L14_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":244 + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_except_block(frame.f_lineno): + * if ( # <<<<<<<<<<<<<< + * frame.f_lineno == try_except_info.except_line or + * frame.f_lineno in try_except_info.raise_lines_in_except + */ + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":250 + * # In a raise inside a try..except block or some except which doesn't + * # match the raised exception. 
+ * return True # <<<<<<<<<<<<<< + * return False + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":244 + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_except_block(frame.f_lineno): + * if ( # <<<<<<<<<<<<<< + * frame.f_lineno == try_except_info.except_line or + * frame.f_lineno in try_except_info.raise_lines_in_except + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":243 + * # and it's gotten in the except line. + * for try_except_info in try_except_infos: + * if try_except_info.is_line_in_except_block(frame.f_lineno): # <<<<<<<<<<<<<< + * if ( + * frame.f_lineno == try_except_info.except_line or + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":242 + * # where one try..except is inside the other with only a raise + * # and it's gotten in the except line. + * for try_except_info in try_except_infos: # <<<<<<<<<<<<<< + * if try_except_info.is_line_in_except_block(frame.f_lineno): + * if ( + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":251 + * # match the raised exception. + * return True + * return False # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":212 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef is_unhandled_exception(container_obj, py_db, frame, int last_raise_line, set raise_lines): # <<<<<<<<<<<<<< + * # ELSE + * # def is_unhandled_exception(container_obj, py_db, frame, last_raise_line, raise_lines): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.is_unhandled_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_try_except_infos); + __Pyx_XDECREF(__pyx_v_valid_try_except_infos); + __Pyx_XDECREF(__pyx_v_try_except_info); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":257 + * cdef class _TryExceptContainerObj: + * cdef public list try_except_infos; + * def __init__(self): # <<<<<<<<<<<<<< + * self.try_except_infos = None + * # ELSE + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 0))) return -1; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":258 + * cdef public list try_except_infos; + * def __init__(self): + * self.try_except_infos = None # <<<<<<<<<<<<<< + * # ELSE + * # class _TryExceptContainerObj(object): + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->try_except_infos); + __Pyx_DECREF(__pyx_v_self->try_except_infos); + __pyx_v_self->try_except_infos = ((PyObject*)Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":257 + * cdef class _TryExceptContainerObj: + * cdef public list try_except_infos; + * def __init__(self): # <<<<<<<<<<<<<< + * self.try_except_infos = None + * # ELSE + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":256 + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class _TryExceptContainerObj: + * cdef public list try_except_infos; # <<<<<<<<<<<<<< + * def __init__(self): + * self.try_except_infos = None + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->try_except_infos); + __pyx_r = __pyx_v_self->try_except_infos; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + 
__Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyList_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 256, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->try_except_infos); + __Pyx_DECREF(__pyx_v_self->try_except_infos); + __pyx_v_self->try_except_infos = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython._TryExceptContainerObj.try_except_infos.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->try_except_infos); + __Pyx_DECREF(__pyx_v_self->try_except_infos); + __pyx_v_self->try_except_infos = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_2__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_2__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + 
PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.try_except_infos,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->try_except_infos); + __Pyx_GIVEREF(__pyx_v_self->try_except_infos); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->try_except_infos); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.try_except_infos,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self.try_except_infos,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.try_except_infos is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.try_except_infos,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.try_except_infos is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->try_except_infos != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.try_except_infos is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self.try_except_infos is not None + * if use_setstate: + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, state) + */ + 
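+/* Descriptive comment (added): the tuple construction below builds the
+   (__pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, None), state) return
+   value shown in the embedded source -- Cython's standard __reduce__ protocol. The
+   constant 0xc8b6eb1 (210464433 decimal, stored as __pyx_int_210464433) is the checksum
+   Cython generates for the type's field layout, used by the unpickle helper to reject
+   incompatible layouts. */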
__Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle__TryExceptContain); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_210464433); + __Pyx_GIVEREF(__pyx_int_210464433); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_210464433); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.try_except_infos is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, None), state + * else: + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle__TryExceptContainerObj__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle__TryExceptContain); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_210464433); + __Pyx_GIVEREF(__pyx_int_210464433); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_210464433); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython._TryExceptContainerObj.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + 
* return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle__TryExceptContainerObj__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_4__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_4__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle__TryExceptContainerObj__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle__TryExceptContainerObj__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle__TryExceptContainerObj, (type(self), 0xc8b6eb1, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle__TryExceptContainerObj__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython._TryExceptContainerObj.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":293 + * cdef int should_skip + * cdef object exc_info + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. 
+ */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_args,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_args)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 293, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_args = ((PyObject*)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 293, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_args), (&PyTuple_Type), 1, "args", 1))) __PYX_ERR(0, 293, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":294 + * cdef object exc_info + * def __init__(self, tuple args): + * self._args = args # In the cython version we don't need to pass the frame # <<<<<<<<<<<<<< + * self.should_skip = -1 # On cythonized version, put in instance. + * self.exc_info = () + */ + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = __pyx_v_args; + + /* "_pydevd_bundle/pydevd_cython.pyx":295 + * def __init__(self, tuple args): + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. 
# <<<<<<<<<<<<<< + * self.exc_info = () + * # ELSE + */ + __pyx_v_self->should_skip = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":296 + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. + * self.exc_info = () # <<<<<<<<<<<<<< + * # ELSE + * # should_skip = -1 # Default value in class (put in instance on set). + */ + __Pyx_INCREF(__pyx_empty_tuple); + __Pyx_GIVEREF(__pyx_empty_tuple); + __Pyx_GOTREF(__pyx_v_self->exc_info); + __Pyx_DECREF(__pyx_v_self->exc_info); + __pyx_v_self->exc_info = __pyx_empty_tuple; + + /* "_pydevd_bundle/pydevd_cython.pyx":293 + * cdef int should_skip + * cdef object exc_info + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":307 + * # ENDIF + * + * def set_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].set_suspend(*args, **kwargs) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_3set_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_3set_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("set_suspend (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "set_suspend", 1))) return NULL; + __pyx_v_kwargs = (__pyx_kwds) ? 
PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return NULL; + __Pyx_GOTREF(__pyx_v_kwargs); + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_2set_suspend(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_2set_suspend(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("set_suspend", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":308 + * + * def set_suspend(self, *args, **kwargs): + * self._args[0].set_suspend(*args, **kwargs) # <<<<<<<<<<<<<< + * + * def do_wait_suspend(self, *args, **kwargs): + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 308, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 308, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 308, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyDict_Copy(__pyx_v_kwargs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 308, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_v_args, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 308, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":307 + * # ENDIF + * + * def set_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].set_suspend(*args, **kwargs) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.set_suspend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":310 + * self._args[0].set_suspend(*args, **kwargs) + * + * def do_wait_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].do_wait_suspend(*args, **kwargs) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_5do_wait_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_5do_wait_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("do_wait_suspend (wrapper)", 0); + if 
(unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "do_wait_suspend", 1))) return NULL; + __pyx_v_kwargs = (__pyx_kwds) ? PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return NULL; + __Pyx_GOTREF(__pyx_v_kwargs); + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_4do_wait_suspend(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_4do_wait_suspend(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("do_wait_suspend", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":311 + * + * def do_wait_suspend(self, *args, **kwargs): + * self._args[0].do_wait_suspend(*args, **kwargs) # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 311, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyDict_Copy(__pyx_v_kwargs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_v_args, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":310 + * self._args[0].set_suspend(*args, **kwargs) + * + * def do_wait_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].do_wait_suspend(*args, **kwargs) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.do_wait_suspend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":314 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * def trace_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef bint should_stop; + * cdef tuple exc_info; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_7trace_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject 
*__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_7trace_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_exception (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_exception", 1, 3, 3, 1); __PYX_ERR(0, 314, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_exception", 1, 3, 3, 2); __PYX_ERR(0, 314, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_exception") < 0)) __PYX_ERR(0, 314, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_exception", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 314, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 314, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_6trace_exception(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_6trace_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + int __pyx_v_should_stop; + PyObject *__pyx_v_exc_info = 0; + PyObject *__pyx_v_frame_skips_cache = NULL; + PyObject *__pyx_v_frame_cache_key = NULL; + PyObject *__pyx_v_custom_key = NULL; + PyObject 
*__pyx_v_container_obj = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *(*__pyx_t_7)(PyObject *); + int __pyx_t_8; + int __pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("trace_exception", 0); + __Pyx_INCREF(__pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":320 + * # def trace_exception(self, frame, event, arg): + * # ENDIF + * if event == 'exception': # <<<<<<<<<<<<<< + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 320, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":321 + * # ENDIF + * if event == 'exception': + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) # <<<<<<<<<<<<<< + * + * if should_stop: + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_should_stop_on_exception(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 321, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { + PyObject* sequence = __pyx_t_3; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 321, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_5 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 321, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 321, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 321, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 321, __pyx_L1_error) + __pyx_t_7 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L5_unpacking_done; + __pyx_L4_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 321, __pyx_L1_error) + __pyx_L5_unpacking_done:; + } + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 321, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 
= 0; + __pyx_v_should_stop = __pyx_t_2; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":323 + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * + * if should_stop: # <<<<<<<<<<<<<< + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + */ + __pyx_t_2 = (__pyx_v_should_stop != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":324 + * + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_EXCEPTION_TYPE_HANDLED); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 324, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 324, __pyx_L1_error) + __pyx_t_5 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_handle_exception(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg, ((PyObject*)__pyx_t_3)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 324, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 324, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":325 + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * elif event == 'return': + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 325, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":324 + * + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":323 + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * + * if should_stop: # <<<<<<<<<<<<<< + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":320 + * # def trace_exception(self, frame, event, arg): + * # ENDIF + * if event == 'exception': # <<<<<<<<<<<<<< + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * + */ + goto __pyx_L3; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":327 + * return self.trace_dispatch + * + * elif event == 'return': # <<<<<<<<<<<<<< + * exc_info = self.exc_info + * if exc_info and arg is None: + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_return, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 327, __pyx_L1_error) + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":328 + * + * elif event == 'return': + * exc_info = self.exc_info # <<<<<<<<<<<<<< + * if exc_info and arg is None: + * frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + */ + if (!(likely(PyTuple_CheckExact(__pyx_v_self->exc_info))||((__pyx_v_self->exc_info) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", 
Py_TYPE(__pyx_v_self->exc_info)->tp_name), 0))) __PYX_ERR(0, 328, __pyx_L1_error) + __pyx_t_5 = __pyx_v_self->exc_info; + __Pyx_INCREF(__pyx_t_5); + __pyx_v_exc_info = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":329 + * elif event == 'return': + * exc_info = self.exc_info + * if exc_info and arg is None: # <<<<<<<<<<<<<< + * frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + * custom_key = (frame_cache_key, 'try_exc_info') + */ + __pyx_t_2 = (__pyx_v_exc_info != Py_None)&&(PyTuple_GET_SIZE(__pyx_v_exc_info) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L9_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_arg == Py_None); + __pyx_t_8 = (__pyx_t_2 != 0); + __pyx_t_1 = __pyx_t_8; + __pyx_L9_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":330 + * exc_info = self.exc_info + * if exc_info and arg is None: + * frame_skips_cache, frame_cache_key = self._args[4], self._args[5] # <<<<<<<<<<<<<< + * custom_key = (frame_cache_key, 'try_exc_info') + * container_obj = frame_skips_cache.get(custom_key) + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 330, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 330, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v_frame_skips_cache = __pyx_t_5; + __pyx_t_5 = 0; + __pyx_v_frame_cache_key = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":331 + * if exc_info and arg is None: + * frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + * custom_key = (frame_cache_key, 'try_exc_info') # <<<<<<<<<<<<<< + * container_obj = frame_skips_cache.get(custom_key) + * if container_obj is None: + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 331, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_v_frame_cache_key); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_frame_cache_key); + __Pyx_INCREF(__pyx_n_s_try_exc_info); + __Pyx_GIVEREF(__pyx_n_s_try_exc_info); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_n_s_try_exc_info); + __pyx_v_custom_key = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":332 + * frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + * custom_key = (frame_cache_key, 'try_exc_info') + * container_obj = frame_skips_cache.get(custom_key) # <<<<<<<<<<<<<< + * if container_obj is None: + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame_skips_cache, __pyx_n_s_get); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 332, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } 
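+/* Descriptive comment (added): this is the generated call sequence for
+   'container_obj = frame_skips_cache.get(custom_key)' from the embedded source -- the
+   bound 'get' method is unpacked when possible and invoked with custom_key, so a single
+   _TryExceptContainerObj ends up cached per (frame_cache_key, 'try_exc_info') entry
+   (see the is-None branch just below). */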
+ __pyx_t_3 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_4, __pyx_v_custom_key) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_v_custom_key); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 332, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_container_obj = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":333 + * custom_key = (frame_cache_key, 'try_exc_info') + * container_obj = frame_skips_cache.get(custom_key) + * if container_obj is None: # <<<<<<<<<<<<<< + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + */ + __pyx_t_1 = (__pyx_v_container_obj == Py_None); + __pyx_t_8 = (__pyx_t_1 != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":334 + * container_obj = frame_skips_cache.get(custom_key) + * if container_obj is None: + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() # <<<<<<<<<<<<<< + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + * self.handle_user_exception(frame): + */ + __pyx_t_3 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_container_obj, __pyx_t_3); + if (unlikely(PyObject_SetItem(__pyx_v_frame_skips_cache, __pyx_v_custom_key, __pyx_t_3) < 0)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":333 + * custom_key = (frame_cache_key, 'try_exc_info') + * container_obj = frame_skips_cache.get(custom_key) + * if container_obj is None: # <<<<<<<<<<<<<< + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":335 + * if container_obj is None: + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ # <<<<<<<<<<<<<< + * self.handle_user_exception(frame): + * return self.trace_dispatch + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 335, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(__pyx_v_exc_info == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 335, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_exc_info, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_9 = __Pyx_PyInt_As_int(__pyx_t_5); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v_exc_info == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 335, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_exc_info, 2, long, 1, 
__Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (!(likely(PySet_CheckExact(__pyx_t_5))||((__pyx_t_5) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython_is_unhandled_exception(__pyx_v_container_obj, __pyx_t_3, __pyx_v_frame, __pyx_t_9, ((PyObject*)__pyx_t_5)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + } else { + __pyx_t_8 = __pyx_t_1; + goto __pyx_L13_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":336 + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + * self.handle_user_exception(frame): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_handle_user_exception); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 336, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_4 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_3, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 336, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 336, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = __pyx_t_1; + __pyx_L13_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":335 + * if container_obj is None: + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ # <<<<<<<<<<<<<< + * self.handle_user_exception(frame): + * return self.trace_dispatch + */ + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":337 + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + * self.handle_user_exception(frame): + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * return self.trace_exception + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":335 + * if container_obj is None: + * container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + * if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ # <<<<<<<<<<<<<< + * self.handle_user_exception(frame): + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":329 
+ * elif event == 'return': + * exc_info = self.exc_info + * if exc_info and arg is None: # <<<<<<<<<<<<<< + * frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + * custom_key = (frame_cache_key, 'try_exc_info') + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":327 + * return self.trace_dispatch + * + * elif event == 'return': # <<<<<<<<<<<<<< + * exc_info = self.exc_info + * if exc_info and arg is None: + */ + } + __pyx_L3:; + + /* "_pydevd_bundle/pydevd_cython.pyx":339 + * return self.trace_dispatch + * + * return self.trace_exception # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_exception); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 339, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":314 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * def trace_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef bint should_stop; + * cdef tuple exc_info; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_exc_info); + __Pyx_XDECREF(__pyx_v_frame_skips_cache); + __Pyx_XDECREF(__pyx_v_frame_cache_key); + __Pyx_XDECREF(__pyx_v_custom_key); + __Pyx_XDECREF(__pyx_v_container_obj); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":342 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _should_stop_on_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef PyDBAdditionalThreadInfo info; + * cdef bint should_stop; + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__should_stop_on_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, CYTHON_UNUSED PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_info = 0; + int __pyx_v_should_stop; + int __pyx_v_was_just_raised; + PyObject *__pyx_v_check_excs = 0; + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_exception = NULL; + PyObject *__pyx_v_value = NULL; + PyObject *__pyx_v_trace = NULL; + PyObject *__pyx_v_exception_breakpoint = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_v_exc_break_user = NULL; + PyObject *__pyx_v_exc_break_caught = NULL; + PyObject *__pyx_v_exc_break = NULL; + PyObject *__pyx_v_is_user_uncaught = NULL; + PyObject *__pyx_v_exc_info = NULL; + PyObject *__pyx_v_lines = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + int __pyx_t_12; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + int __pyx_t_15; + Py_ssize_t __pyx_t_16; + PyObject *__pyx_t_17 = NULL; + int __pyx_lineno = 0; + const 
char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_should_stop_on_exception", 0); + __Pyx_INCREF(__pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":352 + * + * # main_debugger, _filename, info, _thread = self._args + * main_debugger = self._args[0] # <<<<<<<<<<<<<< + * info = self._args[2] + * should_stop = False + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 352, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 352, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_main_debugger = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":353 + * # main_debugger, _filename, info, _thread = self._args + * main_debugger = self._args[0] + * info = self._args[2] # <<<<<<<<<<<<<< + * should_stop = False + * + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 353, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 353, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 353, __pyx_L1_error) + __pyx_v_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":354 + * main_debugger = self._args[0] + * info = self._args[2] + * should_stop = False # <<<<<<<<<<<<<< + * + * # 2 = 2 + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":357 + * + * # 2 = 2 + * if info.pydev_state != 2: # and breakpoint is not None: # <<<<<<<<<<<<<< + * exception, value, trace = arg + * + */ + __pyx_t_2 = ((__pyx_v_info->pydev_state != 2) != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":358 + * # 2 = 2 + * if info.pydev_state != 2: # and breakpoint is not None: + * exception, value, trace = arg # <<<<<<<<<<<<<< + * + * if trace is not None and hasattr(trace, 'tb_next'): + */ + if ((likely(PyTuple_CheckExact(__pyx_v_arg))) || (PyList_CheckExact(__pyx_v_arg))) { + PyObject* sequence = __pyx_v_arg; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 358, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_3 = PyList_GET_ITEM(sequence, 1); + __pyx_t_4 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 358, __pyx_L1_error) + 
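+/* Descriptive comment (added): this block unpacks 'exception, value, trace = arg' from
+   the embedded source; for an 'exception' trace event, arg is the
+   (exc_type, exc_value, traceback) triple that CPython passes to trace functions, so
+   the three items are extracted positionally here. */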
__Pyx_GOTREF(__pyx_t_4); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(__pyx_v_arg); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_1)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 2; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 3) < 0) __PYX_ERR(0, 358, __pyx_L1_error) + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L5_unpacking_done; + __pyx_L4_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 358, __pyx_L1_error) + __pyx_L5_unpacking_done:; + } + __pyx_v_exception = __pyx_t_1; + __pyx_t_1 = 0; + __pyx_v_value = __pyx_t_3; + __pyx_t_3 = 0; + __pyx_v_trace = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":360 + * exception, value, trace = arg + * + * if trace is not None and hasattr(trace, 'tb_next'): # <<<<<<<<<<<<<< + * # on jython trace is None on the first event and it may not have a tb_next. + * + */ + __pyx_t_7 = (__pyx_v_trace != Py_None); + __pyx_t_8 = (__pyx_t_7 != 0); + if (__pyx_t_8) { + } else { + __pyx_t_2 = __pyx_t_8; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_8 = __Pyx_HasAttr(__pyx_v_trace, __pyx_n_s_tb_next); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 360, __pyx_L1_error) + __pyx_t_7 = (__pyx_t_8 != 0); + __pyx_t_2 = __pyx_t_7; + __pyx_L7_bool_binop_done:; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":363 + * # on jython trace is None on the first event and it may not have a tb_next. 
+ * + * should_stop = False # <<<<<<<<<<<<<< + * exception_breakpoint = None + * try: + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":364 + * + * should_stop = False + * exception_breakpoint = None # <<<<<<<<<<<<<< + * try: + * if main_debugger.plugin is not None: + */ + __Pyx_INCREF(Py_None); + __pyx_v_exception_breakpoint = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":365 + * should_stop = False + * exception_breakpoint = None + * try: # <<<<<<<<<<<<<< + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":366 + * exception_breakpoint = None + * try: + * if main_debugger.plugin is not None: # <<<<<<<<<<<<<< + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 366, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_2 = (__pyx_t_4 != Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = (__pyx_t_2 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":367 + * try: + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) # <<<<<<<<<<<<<< + * if result: + * should_stop, frame = result + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 367, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_exception_break); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 367, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[6] = {__pyx_t_3, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_self->_args, __pyx_v_arg}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 5+__pyx_t_12); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 367, __pyx_L9_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[6] = {__pyx_t_3, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_self->_args, __pyx_v_arg}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 5+__pyx_t_12); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 367, __pyx_L9_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_5 = PyTuple_New(5+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 367, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + 
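+/* Descriptive comment (added): fallback argument packing for the plugin call -- when
+   the fast-call paths above are not taken, the five arguments
+   (main_debugger, self, frame, self._args, arg) are placed into a fresh tuple and
+   dispatched via __Pyx_PyObject_Call, matching
+   'main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg)'
+   in the embedded source. */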
__Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_12, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_12, ((PyObject *)__pyx_v_self)); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_12, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_12, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_5, 4+__pyx_t_12, __pyx_v_arg); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 367, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":368 + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: # <<<<<<<<<<<<<< + * should_stop, frame = result + * except: + */ + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 368, __pyx_L9_error) + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":369 + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: + * should_stop, frame = result # <<<<<<<<<<<<<< + * except: + * pydev_log.exception() + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 369, __pyx_L9_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_1 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 369, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 369, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 369, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L17_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_1 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_1)) goto __pyx_L17_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 2) < 0) __PYX_ERR(0, 369, __pyx_L9_error) + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L18_unpacking_done; + __pyx_L17_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 369, __pyx_L9_error) + __pyx_L18_unpacking_done:; + } + __pyx_t_7 = 
__Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely((__pyx_t_7 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 369, __pyx_L9_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_should_stop = __pyx_t_7; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":368 + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: # <<<<<<<<<<<<<< + * should_stop, frame = result + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":366 + * exception_breakpoint = None + * try: + * if main_debugger.plugin is not None: # <<<<<<<<<<<<<< + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":365 + * should_stop = False + * exception_breakpoint = None + * try: # <<<<<<<<<<<<<< + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + */ + } + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L14_try_end; + __pyx_L9_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":370 + * if result: + * should_stop, frame = result + * except: # <<<<<<<<<<<<<< + * pydev_log.exception() + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_4, &__pyx_t_5) < 0) __PYX_ERR(0, 370, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_5); + + /* "_pydevd_bundle/pydevd_cython.pyx":371 + * should_stop, frame = result + * except: + * pydev_log.exception() # <<<<<<<<<<<<<< + * + * if not should_stop: + */ + __Pyx_GetModuleGlobalName(__pyx_t_13, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 371, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_13); + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_13, __pyx_n_s_exception); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 371, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + __pyx_t_13 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + __pyx_t_3 = (__pyx_t_13) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_t_13) : __Pyx_PyObject_CallNoArg(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 371, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L10_exception_handled; + } + __pyx_L11_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":365 + * should_stop = False + * exception_breakpoint = None + * try: # <<<<<<<<<<<<<< + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + */ + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_10, __pyx_t_11); + goto __pyx_L1_error; + __pyx_L10_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_10, __pyx_t_11); + __pyx_L14_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":373 + * pydev_log.exception() + * + * if not should_stop: # <<<<<<<<<<<<<< + * # Apply checks that don't need the exception breakpoint (where we shouldn't ever stop). + * if exception == SystemExit and main_debugger.ignore_system_exit_code(value): + */ + __pyx_t_7 = ((!(__pyx_v_should_stop != 0)) != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":375 + * if not should_stop: + * # Apply checks that don't need the exception breakpoint (where we shouldn't ever stop). + * if exception == SystemExit and main_debugger.ignore_system_exit_code(value): # <<<<<<<<<<<<<< + * pass + * + */ + __pyx_t_5 = PyObject_RichCompare(__pyx_v_exception, __pyx_builtin_SystemExit, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 375, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L23_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_ignore_system_exit_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_5 = (__pyx_t_1) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_1, __pyx_v_value) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_value); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __pyx_t_2; + __pyx_L23_bool_binop_done:; + if (__pyx_t_7) { + goto __pyx_L22; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":378 + * pass + * + * elif exception in (GeneratorExit, StopIteration, StopAsyncIteration): # <<<<<<<<<<<<<< + * # These exceptions are control-flow related (they work as a generator + * # pause), so, we shouldn't stop on them. + */ + __Pyx_INCREF(__pyx_v_exception); + __pyx_t_5 = __pyx_v_exception; + __pyx_t_4 = PyObject_RichCompare(__pyx_t_5, __pyx_builtin_GeneratorExit, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 378, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L25_bool_binop_done; + } + __pyx_t_4 = PyObject_RichCompare(__pyx_t_5, __pyx_builtin_StopIteration, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 378, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L25_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_StopAsyncIteration); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_5, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_7 = __pyx_t_2; + __pyx_L25_bool_binop_done:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_2 = (__pyx_t_7 != 0); + if (__pyx_t_2) { + goto __pyx_L22; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":383 + * pass + * + * elif ignore_exception_trace(trace): # <<<<<<<<<<<<<< + * pass + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ignore_exception_trace); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 383, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_5 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_4, __pyx_v_trace) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_trace); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 383, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 383, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_2) { + goto __pyx_L22; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":387 + * + * else: + * was_just_raised = trace.tb_next is None # <<<<<<<<<<<<<< + * + * # It was not handled by any plugin, lets check exception breakpoints. + */ + /*else*/ { + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 387, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_2 = (__pyx_t_5 == Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_was_just_raised = __pyx_t_2; + + /* "_pydevd_bundle/pydevd_cython.pyx":390 + * + * # It was not handled by any plugin, lets check exception breakpoints. + * check_excs = [] # <<<<<<<<<<<<<< + * + * # Note: check user unhandled before regular exceptions. + */ + __pyx_t_5 = PyList_New(0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_check_excs = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":393 + * + * # Note: check user unhandled before regular exceptions. + * exc_break_user = main_debugger.get_exception_breakpoint( # <<<<<<<<<<<<<< + * exception, main_debugger.break_on_user_uncaught_exceptions) + * if exc_break_user is not None: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_get_exception_breakpoint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":394 + * # Note: check user unhandled before regular exceptions. 
+ * exc_break_user = main_debugger.get_exception_breakpoint( + * exception, main_debugger.break_on_user_uncaught_exceptions) # <<<<<<<<<<<<<< + * if exc_break_user is not None: + * check_excs.append((exc_break_user, True)) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_user_uncaught_exception); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 394, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_exception, __pyx_t_4}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_exception, __pyx_t_4}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_14 = PyTuple_New(2+__pyx_t_12); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_14, 0+__pyx_t_12, __pyx_v_exception); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_14, 1+__pyx_t_12, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_14, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_exc_break_user = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":395 + * exc_break_user = main_debugger.get_exception_breakpoint( + * exception, main_debugger.break_on_user_uncaught_exceptions) + * if exc_break_user is not None: # <<<<<<<<<<<<<< + * check_excs.append((exc_break_user, True)) + * + */ + __pyx_t_2 = (__pyx_v_exc_break_user != Py_None); + __pyx_t_7 = (__pyx_t_2 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":396 + * exception, main_debugger.break_on_user_uncaught_exceptions) + * if exc_break_user is not None: + * check_excs.append((exc_break_user, True)) # <<<<<<<<<<<<<< + * + * exc_break_caught = main_debugger.get_exception_breakpoint( + */ + __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 396, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_exc_break_user); + __Pyx_GIVEREF(__pyx_v_exc_break_user); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_exc_break_user); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_5, 1, Py_True); + __pyx_t_15 = __Pyx_PyList_Append(__pyx_v_check_excs, __pyx_t_5); if (unlikely(__pyx_t_15 == ((int)-1))) __PYX_ERR(0, 396, __pyx_L1_error) + 
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":395 + * exc_break_user = main_debugger.get_exception_breakpoint( + * exception, main_debugger.break_on_user_uncaught_exceptions) + * if exc_break_user is not None: # <<<<<<<<<<<<<< + * check_excs.append((exc_break_user, True)) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":398 + * check_excs.append((exc_break_user, True)) + * + * exc_break_caught = main_debugger.get_exception_breakpoint( # <<<<<<<<<<<<<< + * exception, main_debugger.break_on_caught_exceptions) + * if exc_break_caught is not None: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_get_exception_breakpoint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 398, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":399 + * + * exc_break_caught = main_debugger.get_exception_breakpoint( + * exception, main_debugger.break_on_caught_exceptions) # <<<<<<<<<<<<<< + * if exc_break_caught is not None: + * check_excs.append((exc_break_caught, False)) + */ + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_caught_exceptions); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 399, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_4 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_exception, __pyx_t_14}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 398, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_exception, __pyx_t_14}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 398, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_12); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 398, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_12, __pyx_v_exception); + __Pyx_GIVEREF(__pyx_t_14); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_12, __pyx_t_14); + __pyx_t_14 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 398, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_exc_break_caught = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":400 + * exc_break_caught = main_debugger.get_exception_breakpoint( + * exception, main_debugger.break_on_caught_exceptions) + * if exc_break_caught is not None: # <<<<<<<<<<<<<< + * check_excs.append((exc_break_caught, False)) + * + */ + __pyx_t_7 = (__pyx_v_exc_break_caught != 
Py_None); + __pyx_t_2 = (__pyx_t_7 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":401 + * exception, main_debugger.break_on_caught_exceptions) + * if exc_break_caught is not None: + * check_excs.append((exc_break_caught, False)) # <<<<<<<<<<<<<< + * + * for exc_break, is_user_uncaught in check_excs: + */ + __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_exc_break_caught); + __Pyx_GIVEREF(__pyx_v_exc_break_caught); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_exc_break_caught); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + PyTuple_SET_ITEM(__pyx_t_5, 1, Py_False); + __pyx_t_15 = __Pyx_PyList_Append(__pyx_v_check_excs, __pyx_t_5); if (unlikely(__pyx_t_15 == ((int)-1))) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":400 + * exc_break_caught = main_debugger.get_exception_breakpoint( + * exception, main_debugger.break_on_caught_exceptions) + * if exc_break_caught is not None: # <<<<<<<<<<<<<< + * check_excs.append((exc_break_caught, False)) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":403 + * check_excs.append((exc_break_caught, False)) + * + * for exc_break, is_user_uncaught in check_excs: # <<<<<<<<<<<<<< + * # Initially mark that it should stop and then go into exclusions. + * should_stop = True + */ + __pyx_t_5 = __pyx_v_check_excs; __Pyx_INCREF(__pyx_t_5); __pyx_t_16 = 0; + for (;;) { + if (__pyx_t_16 >= PyList_GET_SIZE(__pyx_t_5)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_16); __Pyx_INCREF(__pyx_t_1); __pyx_t_16++; if (unlikely(0 < 0)) __PYX_ERR(0, 403, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_5, __pyx_t_16); __pyx_t_16++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 403, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_14 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_3 = PyList_GET_ITEM(sequence, 0); + __pyx_t_14 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_14); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_14 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_6 = Py_TYPE(__pyx_t_4)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_6(__pyx_t_4); if (unlikely(!__pyx_t_3)) goto __pyx_L32_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_14 = __pyx_t_6(__pyx_t_4); if (unlikely(!__pyx_t_14)) goto __pyx_L32_unpacking_failed; + __Pyx_GOTREF(__pyx_t_14); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_4), 
2) < 0) __PYX_ERR(0, 403, __pyx_L1_error) + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L33_unpacking_done; + __pyx_L32_unpacking_failed:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 403, __pyx_L1_error) + __pyx_L33_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_exc_break, __pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_is_user_uncaught, __pyx_t_14); + __pyx_t_14 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":405 + * for exc_break, is_user_uncaught in check_excs: + * # Initially mark that it should stop and then go into exclusions. + * should_stop = True # <<<<<<<<<<<<<< + * + * if main_debugger.exclude_exception_by_filter(exc_break, trace): + */ + __pyx_v_should_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":407 + * should_stop = True + * + * if main_debugger.exclude_exception_by_filter(exc_break, trace): # <<<<<<<<<<<<<< + * pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) + * should_stop = False + */ + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_exclude_exception_by_filter); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_3 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_exc_break, __pyx_v_trace}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_14, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_exc_break, __pyx_v_trace}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_14, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_4 = PyTuple_New(2+__pyx_t_12); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_exc_break); + __Pyx_GIVEREF(__pyx_v_exc_break); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_12, __pyx_v_exc_break); + __Pyx_INCREF(__pyx_v_trace); + __Pyx_GIVEREF(__pyx_v_trace); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_12, __pyx_v_trace); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":408 + * + * if main_debugger.exclude_exception_by_filter(exc_break, trace): + * pydev_log.debug("Ignore exception %s in library %s -- 
(%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) # <<<<<<<<<<<<<< + * should_stop = False + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_14, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_debug); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_co_name); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = PyTuple_New(3); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_v_exception); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_14, 1, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_13); + PyTuple_SET_ITEM(__pyx_t_14, 2, __pyx_t_13); + __pyx_t_3 = 0; + __pyx_t_13 = 0; + __pyx_t_13 = __Pyx_PyString_Format(__pyx_kp_s_Ignore_exception_s_in_library_s, __pyx_t_14); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_1 = (__pyx_t_14) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_14, __pyx_t_13) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_13); + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":409 + * if main_debugger.exclude_exception_by_filter(exc_break, trace): + * pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) + * should_stop = False # <<<<<<<<<<<<<< + * + * elif exc_break.condition is not None and \ + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":407 + * should_stop = True + * + * if main_debugger.exclude_exception_by_filter(exc_break, trace): # <<<<<<<<<<<<<< + * pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) + * should_stop = False + */ + goto __pyx_L34; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":411 + * should_stop = False + * + * elif exc_break.condition is not None and \ # <<<<<<<<<<<<<< + * not main_debugger.handle_breakpoint_condition(info, exc_break, frame): + * should_stop = False + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_exc_break, __pyx_n_s_condition); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 411, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = (__pyx_t_1 != Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_8 = (__pyx_t_7 != 0); + if (__pyx_t_8) { + } else { + __pyx_t_2 = __pyx_t_8; + goto __pyx_L35_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":412 + * + * elif exc_break.condition is not None and \ + * not main_debugger.handle_breakpoint_condition(info, exc_break, frame): # <<<<<<<<<<<<<< + * should_stop = False + * + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_handle_breakpoint_condition); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 412, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_13 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_13, ((PyObject *)__pyx_v_info), __pyx_v_exc_break, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 412, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_13, ((PyObject *)__pyx_v_info), __pyx_v_exc_break, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 412, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_14 = PyTuple_New(3+__pyx_t_12); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 412, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + if (__pyx_t_13) { + __Pyx_GIVEREF(__pyx_t_13); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_13); __pyx_t_13 = NULL; + } + 
__Pyx_INCREF(((PyObject *)__pyx_v_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_info)); + PyTuple_SET_ITEM(__pyx_t_14, 0+__pyx_t_12, ((PyObject *)__pyx_v_info)); + __Pyx_INCREF(__pyx_v_exc_break); + __Pyx_GIVEREF(__pyx_v_exc_break); + PyTuple_SET_ITEM(__pyx_t_14, 1+__pyx_t_12, __pyx_v_exc_break); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_14, 2+__pyx_t_12, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_14, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 412, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 412, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_7 = ((!__pyx_t_8) != 0); + __pyx_t_2 = __pyx_t_7; + __pyx_L35_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":411 + * should_stop = False + * + * elif exc_break.condition is not None and \ # <<<<<<<<<<<<<< + * not main_debugger.handle_breakpoint_condition(info, exc_break, frame): + * should_stop = False + */ + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":413 + * elif exc_break.condition is not None and \ + * not main_debugger.handle_breakpoint_condition(info, exc_break, frame): + * should_stop = False # <<<<<<<<<<<<<< + * + * elif is_user_uncaught: + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":411 + * should_stop = False + * + * elif exc_break.condition is not None and \ # <<<<<<<<<<<<<< + * not main_debugger.handle_breakpoint_condition(info, exc_break, frame): + * should_stop = False + */ + goto __pyx_L34; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":415 + * should_stop = False + * + * elif is_user_uncaught: # <<<<<<<<<<<<<< + * # Note: we don't stop here, we just collect the exc_info to use later on... + * should_stop = False + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_is_user_uncaught); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 415, __pyx_L1_error) + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":417 + * elif is_user_uncaught: + * # Note: we don't stop here, we just collect the exc_info to use later on... + * should_stop = False # <<<<<<<<<<<<<< + * if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":418 + * # Note: we don't stop here, we just collect the exc_info to use later on... 
+ * should_stop = False + * if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ # <<<<<<<<<<<<<< + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): + * # User uncaught means that we're currently in user code but the code + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_v_frame, __pyx_t_13, Py_True}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_v_frame, __pyx_t_13, Py_True}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(3+__pyx_t_12); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_14) { + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_14); __pyx_t_14 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_12, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_13); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_12, __pyx_t_13); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_12, Py_True); + __pyx_t_13 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_8 = ((!__pyx_t_7) != 0); + if (__pyx_t_8) { + } else { + __pyx_t_2 = __pyx_t_8; + goto __pyx_L38_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":419 + * should_stop = False + * if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): # <<<<<<<<<<<<<< + * # User uncaught means that we're currently in user code 
but the code + * # up the stack is library code. + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = (__pyx_t_1 == Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_7 = (__pyx_t_8 != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_2 = __pyx_t_7; + goto __pyx_L38_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_13, __pyx_n_s_f_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + __pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_t_3, __pyx_t_13, Py_True}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_t_3, __pyx_t_13, Py_True}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + } else + #endif + { + __pyx_t_17 = PyTuple_New(3+__pyx_t_12); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + if (__pyx_t_14) { + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_t_14); __pyx_t_14 = NULL; + } + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_17, 0+__pyx_t_12, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_13); + PyTuple_SET_ITEM(__pyx_t_17, 1+__pyx_t_12, __pyx_t_13); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_17, 2+__pyx_t_12, Py_True); + __pyx_t_3 = 0; + __pyx_t_13 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_17, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 419, 
__pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = __pyx_t_7; + __pyx_L38_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":418 + * # Note: we don't stop here, we just collect the exc_info to use later on... + * should_stop = False + * if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ # <<<<<<<<<<<<<< + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): + * # User uncaught means that we're currently in user code but the code + */ + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":422 + * # User uncaught means that we're currently in user code but the code + * # up the stack is library code. + * exc_info = self.exc_info # <<<<<<<<<<<<<< + * if not exc_info: + * exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) + */ + __pyx_t_1 = __pyx_v_self->exc_info; + __Pyx_INCREF(__pyx_t_1); + __Pyx_XDECREF_SET(__pyx_v_exc_info, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":423 + * # up the stack is library code. + * exc_info = self.exc_info + * if not exc_info: # <<<<<<<<<<<<<< + * exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) + * else: + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_exc_info); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 423, __pyx_L1_error) + __pyx_t_7 = ((!__pyx_t_2) != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":424 + * exc_info = self.exc_info + * if not exc_info: + * exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) # <<<<<<<<<<<<<< + * else: + * lines = exc_info[2] + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_17 = PySet_New(0); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + if (PySet_Add(__pyx_t_17, __pyx_t_4) < 0) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_arg); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_17); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_17); + __pyx_t_1 = 0; + __pyx_t_17 = 0; + __Pyx_DECREF_SET(__pyx_v_exc_info, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":423 + * # up the stack is library code. 
+ * exc_info = self.exc_info + * if not exc_info: # <<<<<<<<<<<<<< + * exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) + * else: + */ + goto __pyx_L41; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":426 + * exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) + * else: + * lines = exc_info[2] # <<<<<<<<<<<<<< + * lines.add(frame.f_lineno) + * exc_info = (arg, frame.f_lineno, lines) + */ + /*else*/ { + __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_exc_info, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 426, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_lines, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":427 + * else: + * lines = exc_info[2] + * lines.add(frame.f_lineno) # <<<<<<<<<<<<<< + * exc_info = (arg, frame.f_lineno, lines) + * self.exc_info = exc_info + */ + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_lines, __pyx_n_s_add); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_13 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_17))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_17); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_17); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_17, function); + } + } + __pyx_t_4 = (__pyx_t_13) ? __Pyx_PyObject_Call2Args(__pyx_t_17, __pyx_t_13, __pyx_t_1) : __Pyx_PyObject_CallOneArg(__pyx_t_17, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":428 + * lines = exc_info[2] + * lines.add(frame.f_lineno) + * exc_info = (arg, frame.f_lineno, lines) # <<<<<<<<<<<<<< + * self.exc_info = exc_info + * else: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 428, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_17 = PyTuple_New(3); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 428, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_v_arg); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_17, 1, __pyx_t_4); + __Pyx_INCREF(__pyx_v_lines); + __Pyx_GIVEREF(__pyx_v_lines); + PyTuple_SET_ITEM(__pyx_t_17, 2, __pyx_v_lines); + __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_exc_info, __pyx_t_17); + __pyx_t_17 = 0; + } + __pyx_L41:; + + /* "_pydevd_bundle/pydevd_cython.pyx":429 + * lines.add(frame.f_lineno) + * exc_info = (arg, frame.f_lineno, lines) + * self.exc_info = exc_info # <<<<<<<<<<<<<< + * else: + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + */ + __Pyx_INCREF(__pyx_v_exc_info); + __Pyx_GIVEREF(__pyx_v_exc_info); + __Pyx_GOTREF(__pyx_v_self->exc_info); + __Pyx_DECREF(__pyx_v_self->exc_info); + __pyx_v_self->exc_info = __pyx_v_exc_info; + + /* "_pydevd_bundle/pydevd_cython.pyx":418 + * # Note: we don't stop here, we just collect the exc_info to use later on... 
+ * should_stop = False + * if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ # <<<<<<<<<<<<<< + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): + * # User uncaught means that we're currently in user code but the code + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":415 + * should_stop = False + * + * elif is_user_uncaught: # <<<<<<<<<<<<<< + * # Note: we don't stop here, we just collect the exc_info to use later on... + * should_stop = False + */ + goto __pyx_L34; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":432 + * else: + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + * if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised and not just_raised(trace.tb_next): + * # In this case we never stop if it was just raised, so, to know if it was the first we + */ + /*else*/ { + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_exc_break, __pyx_n_s_notify_on_first_raise_only); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_17); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + if (__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L43_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":433 + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + * if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ + * and not was_just_raised and not just_raised(trace.tb_next): # <<<<<<<<<<<<<< + * # In this case we never stop if it was just raised, so, to know if it was the first we + * # need to check if we're in the 2nd method. + */ + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_skip_on_exceptions_thrown_in_sam); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + + /* "_pydevd_bundle/pydevd_cython.pyx":432 + * else: + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + * if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised and not just_raised(trace.tb_next): + * # In this case we never stop if it was just raised, so, to know if it was the first we + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_17); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + if (__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L43_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":433 + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + * if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ + * and not was_just_raised and not just_raised(trace.tb_next): # <<<<<<<<<<<<<< + * # In this case we never stop if it was just raised, so, to know if it was the first we + * # need to check if we're in the 2nd method. 
+ */ + __pyx_t_2 = ((!(__pyx_v_was_just_raised != 0)) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L43_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_just_raised); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_13 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_17 = (__pyx_t_13) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_13, __pyx_t_1) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_17); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_8 = ((!__pyx_t_2) != 0); + __pyx_t_7 = __pyx_t_8; + __pyx_L43_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":432 + * else: + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + * if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised and not just_raised(trace.tb_next): + * # In this case we never stop if it was just raised, so, to know if it was the first we + */ + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":436 + * # In this case we never stop if it was just raised, so, to know if it was the first we + * # need to check if we're in the 2nd method. + * should_stop = False # I.e.: we stop only when we're at the caller of a method that throws an exception # <<<<<<<<<<<<<< + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":432 + * else: + * # I.e.: these are only checked if we're not dealing with user uncaught exceptions. 
+ * if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised and not just_raised(trace.tb_next): + * # In this case we never stop if it was just raised, so, to know if it was the first we + */ + goto __pyx_L42; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":438 + * should_stop = False # I.e.: we stop only when we're at the caller of a method that throws an exception + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised: + * should_stop = False # I.e.: we stop only when it was just raised + */ + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_exc_break, __pyx_n_s_notify_on_first_raise_only); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_17); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + if (__pyx_t_8) { + } else { + __pyx_t_7 = __pyx_t_8; + goto __pyx_L47_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":439 + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ + * and not was_just_raised: # <<<<<<<<<<<<<< + * should_stop = False # I.e.: we stop only when it was just raised + * + */ + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_skip_on_exceptions_thrown_in_sam); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + + /* "_pydevd_bundle/pydevd_cython.pyx":438 + * should_stop = False # I.e.: we stop only when we're at the caller of a method that throws an exception + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised: + * should_stop = False # I.e.: we stop only when it was just raised + */ + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_17); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_2 = ((!__pyx_t_8) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L47_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":439 + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ + * and not was_just_raised: # <<<<<<<<<<<<<< + * should_stop = False # I.e.: we stop only when it was just raised + * + */ + __pyx_t_2 = ((!(__pyx_v_was_just_raised != 0)) != 0); + __pyx_t_7 = __pyx_t_2; + __pyx_L47_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":438 + * should_stop = False # I.e.: we stop only when we're at the caller of a method that throws an exception + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised: + * should_stop = False # I.e.: we stop only when it was just raised + */ + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":440 + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ + * and not was_just_raised: + * should_stop = False # I.e.: we stop only when it was just raised # <<<<<<<<<<<<<< + * + * elif was_just_raised and main_debugger.skip_on_exceptions_thrown_in_same_context: + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":438 + * should_stop = 
False # I.e.: we stop only when we're at the caller of a method that throws an exception + * + * elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ # <<<<<<<<<<<<<< + * and not was_just_raised: + * should_stop = False # I.e.: we stop only when it was just raised + */ + goto __pyx_L42; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":442 + * should_stop = False # I.e.: we stop only when it was just raised + * + * elif was_just_raised and main_debugger.skip_on_exceptions_thrown_in_same_context: # <<<<<<<<<<<<<< + * # Option: Don't break if an exception is caught in the same function from which it is thrown + * should_stop = False + */ + __pyx_t_2 = (__pyx_v_was_just_raised != 0); + if (__pyx_t_2) { + } else { + __pyx_t_7 = __pyx_t_2; + goto __pyx_L50_bool_binop_done; + } + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_skip_on_exceptions_thrown_in_sam); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 442, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_17); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 442, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_7 = __pyx_t_2; + __pyx_L50_bool_binop_done:; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":444 + * elif was_just_raised and main_debugger.skip_on_exceptions_thrown_in_same_context: + * # Option: Don't break if an exception is caught in the same function from which it is thrown + * should_stop = False # <<<<<<<<<<<<<< + * + * if should_stop: + */ + __pyx_v_should_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":442 + * should_stop = False # I.e.: we stop only when it was just raised + * + * elif was_just_raised and main_debugger.skip_on_exceptions_thrown_in_same_context: # <<<<<<<<<<<<<< + * # Option: Don't break if an exception is caught in the same function from which it is thrown + * should_stop = False + */ + } + __pyx_L42:; + } + __pyx_L34:; + + /* "_pydevd_bundle/pydevd_cython.pyx":446 + * should_stop = False + * + * if should_stop: # <<<<<<<<<<<<<< + * exception_breakpoint = exc_break + * try: + */ + __pyx_t_7 = (__pyx_v_should_stop != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":447 + * + * if should_stop: + * exception_breakpoint = exc_break # <<<<<<<<<<<<<< + * try: + * info.pydev_message = exc_break.qname + */ + __Pyx_INCREF(__pyx_v_exc_break); + __Pyx_DECREF_SET(__pyx_v_exception_breakpoint, __pyx_v_exc_break); + + /* "_pydevd_bundle/pydevd_cython.pyx":448 + * if should_stop: + * exception_breakpoint = exc_break + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exc_break.qname + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_11, &__pyx_t_10, &__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_9); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":449 + * exception_breakpoint = exc_break + * try: + * info.pydev_message = exc_break.qname # <<<<<<<<<<<<<< + * except: + * info.pydev_message = exc_break.qname.encode('utf-8') + */ + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_exc_break, __pyx_n_s_qname); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 449, __pyx_L53_error) + __Pyx_GOTREF(__pyx_t_17); + if (!(likely(PyString_CheckExact(__pyx_t_17))||((__pyx_t_17) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_17)->tp_name), 0))) __PYX_ERR(0, 449, __pyx_L53_error) + __Pyx_GIVEREF(__pyx_t_17); + 
__Pyx_GOTREF(__pyx_v_info->pydev_message); + __Pyx_DECREF(__pyx_v_info->pydev_message); + __pyx_v_info->pydev_message = ((PyObject*)__pyx_t_17); + __pyx_t_17 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":448 + * if should_stop: + * exception_breakpoint = exc_break + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exc_break.qname + * except: + */ + } + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L60_try_end; + __pyx_L53_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":450 + * try: + * info.pydev_message = exc_break.qname + * except: # <<<<<<<<<<<<<< + * info.pydev_message = exc_break.qname.encode('utf-8') + * break + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_17, &__pyx_t_4, &__pyx_t_1) < 0) __PYX_ERR(0, 450, __pyx_L55_except_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":451 + * info.pydev_message = exc_break.qname + * except: + * info.pydev_message = exc_break.qname.encode('utf-8') # <<<<<<<<<<<<<< + * break + * + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_exc_break, __pyx_n_s_qname); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 451, __pyx_L55_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_encode); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 451, __pyx_L55_except_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + __pyx_t_13 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_14, __pyx_t_3, __pyx_kp_s_utf_8) : __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_kp_s_utf_8); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 451, __pyx_L55_except_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_13))||((__pyx_t_13) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_13)->tp_name), 0))) __PYX_ERR(0, 451, __pyx_L55_except_error) + __Pyx_GIVEREF(__pyx_t_13); + __Pyx_GOTREF(__pyx_v_info->pydev_message); + __Pyx_DECREF(__pyx_v_info->pydev_message); + __pyx_v_info->pydev_message = ((PyObject*)__pyx_t_13); + __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L54_exception_handled; + } + __pyx_L55_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":448 + * if should_stop: + * exception_breakpoint = exc_break + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exc_break.qname + * except: + */ + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_10, __pyx_t_9); + goto __pyx_L1_error; + __pyx_L54_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_10, __pyx_t_9); + __pyx_L60_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":452 + * except: + * info.pydev_message = exc_break.qname.encode('utf-8') + * break # <<<<<<<<<<<<<< + * + * if should_stop: + */ + goto __pyx_L31_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":446 + * should_stop = False + * + * if should_stop: # <<<<<<<<<<<<<< + * exception_breakpoint = exc_break + * try: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":403 + * check_excs.append((exc_break_caught, False)) + * + * for exc_break, is_user_uncaught in check_excs: # <<<<<<<<<<<<<< + * # Initially mark that it should stop and then go into exclusions. + * should_stop = True + */ + } + __pyx_L31_break:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __pyx_L22:; + + /* "_pydevd_bundle/pydevd_cython.pyx":373 + * pydev_log.exception() + * + * if not should_stop: # <<<<<<<<<<<<<< + * # Apply checks that don't need the exception breakpoint (where we shouldn't ever stop). + * if exception == SystemExit and main_debugger.ignore_system_exit_code(value): + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":454 + * break + * + * if should_stop: # <<<<<<<<<<<<<< + * # Always add exception to frame (must remove later after we proceed). + * add_exception_to_frame(frame, (exception, value, trace)) + */ + __pyx_t_7 = (__pyx_v_should_stop != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":456 + * if should_stop: + * # Always add exception to frame (must remove later after we proceed). 
+ * add_exception_to_frame(frame, (exception, value, trace)) # <<<<<<<<<<<<<< + * + * if exception_breakpoint is not None and exception_breakpoint.expression is not None: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_add_exception_to_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_exception); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_value); + __Pyx_INCREF(__pyx_v_trace); + __Pyx_GIVEREF(__pyx_v_trace); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_trace); + __pyx_t_17 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_17 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_17)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_17); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_17, __pyx_v_frame, __pyx_t_4}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_17, __pyx_v_frame, __pyx_t_4}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_13 = PyTuple_New(2+__pyx_t_12); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_13); + if (__pyx_t_17) { + __Pyx_GIVEREF(__pyx_t_17); PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_t_17); __pyx_t_17 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_13, 0+__pyx_t_12, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_13, 1+__pyx_t_12, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_13, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":458 + * add_exception_to_frame(frame, (exception, value, trace)) + * + * if exception_breakpoint is not None and exception_breakpoint.expression is not None: # <<<<<<<<<<<<<< + * main_debugger.handle_breakpoint_expression(exception_breakpoint, info, frame) + * + */ + __pyx_t_2 = (__pyx_v_exception_breakpoint != Py_None); + __pyx_t_8 = (__pyx_t_2 != 0); + if (__pyx_t_8) { + } else { + __pyx_t_7 = __pyx_t_8; + goto __pyx_L65_bool_binop_done; + } + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_expression); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 458, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_8 = (__pyx_t_5 != Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_2 = (__pyx_t_8 != 0); + 
__pyx_t_7 = __pyx_t_2; + __pyx_L65_bool_binop_done:; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":459 + * + * if exception_breakpoint is not None and exception_breakpoint.expression is not None: + * main_debugger.handle_breakpoint_expression(exception_breakpoint, info, frame) # <<<<<<<<<<<<<< + * + * return should_stop, frame + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_handle_breakpoint_expression); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 459, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_13 = NULL; + __pyx_t_12 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_12 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[4] = {__pyx_t_13, __pyx_v_exception_breakpoint, ((PyObject *)__pyx_v_info), __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 459, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[4] = {__pyx_t_13, __pyx_v_exception_breakpoint, ((PyObject *)__pyx_v_info), __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 459, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_4 = PyTuple_New(3+__pyx_t_12); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 459, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_13) { + __Pyx_GIVEREF(__pyx_t_13); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_13); __pyx_t_13 = NULL; + } + __Pyx_INCREF(__pyx_v_exception_breakpoint); + __Pyx_GIVEREF(__pyx_v_exception_breakpoint); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_12, __pyx_v_exception_breakpoint); + __Pyx_INCREF(((PyObject *)__pyx_v_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_info)); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_12, ((PyObject *)__pyx_v_info)); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_12, __pyx_v_frame); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 459, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":458 + * add_exception_to_frame(frame, (exception, value, trace)) + * + * if exception_breakpoint is not None and exception_breakpoint.expression is not None: # <<<<<<<<<<<<<< + * main_debugger.handle_breakpoint_expression(exception_breakpoint, info, frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":454 + * break + * + * if should_stop: # <<<<<<<<<<<<<< + * # Always add exception to frame (must remove later after we proceed). + * add_exception_to_frame(frame, (exception, value, trace)) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":360 + * exception, value, trace = arg + * + * if trace is not None and hasattr(trace, 'tb_next'): # <<<<<<<<<<<<<< + * # on jython trace is None on the first event and it may not have a tb_next. 
+ * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":357 + * + * # 2 = 2 + * if info.pydev_state != 2: # and breakpoint is not None: # <<<<<<<<<<<<<< + * exception, value, trace = arg + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":461 + * main_debugger.handle_breakpoint_expression(exception_breakpoint, info, frame) + * + * return should_stop, frame # <<<<<<<<<<<<<< + * + * def handle_user_exception(self, frame): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = __Pyx_PyBool_FromLong(__pyx_v_should_stop); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 461, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 461, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_frame); + __pyx_t_5 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":342 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _should_stop_on_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef PyDBAdditionalThreadInfo info; + * cdef bint should_stop; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_info); + __Pyx_XDECREF(__pyx_v_check_excs); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_exception); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XDECREF(__pyx_v_trace); + __Pyx_XDECREF(__pyx_v_exception_breakpoint); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XDECREF(__pyx_v_exc_break_user); + __Pyx_XDECREF(__pyx_v_exc_break_caught); + __Pyx_XDECREF(__pyx_v_exc_break); + __Pyx_XDECREF(__pyx_v_is_user_uncaught); + __Pyx_XDECREF(__pyx_v_exc_info); + __Pyx_XDECREF(__pyx_v_lines); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":463 + * return should_stop, frame + * + * def handle_user_exception(self, frame): # <<<<<<<<<<<<<< + * exc_info = self.exc_info + * if exc_info: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_9handle_user_exception(PyObject *__pyx_v_self, PyObject *__pyx_v_frame); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_9handle_user_exception(PyObject *__pyx_v_self, PyObject *__pyx_v_frame) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("handle_user_exception (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8handle_user_exception(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), ((PyObject *)__pyx_v_frame)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8handle_user_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_exc_info = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int 
__pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("handle_user_exception", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":464 + * + * def handle_user_exception(self, frame): + * exc_info = self.exc_info # <<<<<<<<<<<<<< + * if exc_info: + * return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) + */ + __pyx_t_1 = __pyx_v_self->exc_info; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_exc_info = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":465 + * def handle_user_exception(self, frame): + * exc_info = self.exc_info + * if exc_info: # <<<<<<<<<<<<<< + * return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) + * return False + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_exc_info); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 465, __pyx_L1_error) + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":466 + * exc_info = self.exc_info + * if exc_info: + * return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) # <<<<<<<<<<<<<< + * return False + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_exc_info, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 466, __pyx_L1_error) + __pyx_t_4 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_handle_exception(__pyx_v_self, __pyx_v_frame, __pyx_n_s_exception, __pyx_t_1, ((PyObject*)__pyx_t_3)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":465 + * def handle_user_exception(self, frame): + * exc_info = self.exc_info + * if exc_info: # <<<<<<<<<<<<<< + * return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) + * return False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":467 + * if exc_info: + * return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) + * return False # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":463 + * return should_stop, frame + * + * def handle_user_exception(self, frame): # <<<<<<<<<<<<<< + * exc_info = self.exc_info + * if exc_info: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_user_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_exc_info); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":470 
+ * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _handle_exception(self, frame, str event, arg, str exception_type): # <<<<<<<<<<<<<< + * cdef bint stopped; + * cdef tuple abs_real_path_and_base; + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__handle_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg, PyObject *__pyx_v_exception_type) { + int __pyx_v_stopped; + PyObject *__pyx_v_abs_real_path_and_base = 0; + PyObject *__pyx_v_absolute_filename = 0; + PyObject *__pyx_v_canonical_normalized_filename = 0; + PyObject *__pyx_v_filename_to_lines_where_exceptions_are_ignored = 0; + PyObject *__pyx_v_lines_ignored = 0; + PyObject *__pyx_v_frame_id_to_frame = 0; + PyObject *__pyx_v_merged = 0; + PyObject *__pyx_v_trace_obj = 0; + PyObject *__pyx_v_main_debugger = 0; + PyObject *__pyx_v_initial_trace_obj = NULL; + PyObject *__pyx_v_check_trace_obj = NULL; + PyObject *__pyx_v_curr_stat = NULL; + PyObject *__pyx_v_last_stat = NULL; + PyObject *__pyx_v_from_user_input = NULL; + PyObject *__pyx_v_exc_lineno = NULL; + PyObject *__pyx_v_line = NULL; + PyObject *__pyx_v_thread = NULL; + PyObject *__pyx_v_f = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + Py_ssize_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + int __pyx_t_16; + PyObject *__pyx_t_17 = NULL; + int __pyx_t_18; + char const *__pyx_t_19; + PyObject *__pyx_t_20 = NULL; + PyObject *__pyx_t_21 = NULL; + PyObject *__pyx_t_22 = NULL; + PyObject *__pyx_t_23 = NULL; + PyObject *__pyx_t_24 = NULL; + PyObject *__pyx_t_25 = NULL; + char const *__pyx_t_26; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_handle_exception", 0); + __Pyx_INCREF(__pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":484 + * # def _handle_exception(self, frame, event, arg, exception_type): + * # ENDIF + * stopped = False # <<<<<<<<<<<<<< + * try: + * # print('_handle_exception', frame.f_lineno, frame.f_code.co_name) + */ + __pyx_v_stopped = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":485 + * # ENDIF + * stopped = False + * try: # <<<<<<<<<<<<<< + * # print('_handle_exception', frame.f_lineno, frame.f_code.co_name) + * + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":489 + * + * # We have 3 things in arg: exception type, description, traceback object + * trace_obj = arg[2] # <<<<<<<<<<<<<< + * main_debugger = self._args[0] + * + */ + __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_arg, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 489, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_trace_obj = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":490 + * # We have 3 things in arg: exception type, description, traceback object + * trace_obj = arg[2] + * main_debugger = self._args[0] # <<<<<<<<<<<<<< + * + * initial_trace_obj = trace_obj + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 490, __pyx_L4_error) + } + __pyx_t_1 = 
__Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 490, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_main_debugger = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":492 + * main_debugger = self._args[0] + * + * initial_trace_obj = trace_obj # <<<<<<<<<<<<<< + * if trace_obj.tb_next is None and trace_obj.tb_frame is frame: + * # I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + */ + __Pyx_INCREF(__pyx_v_trace_obj); + __pyx_v_initial_trace_obj = __pyx_v_trace_obj; + + /* "_pydevd_bundle/pydevd_cython.pyx":493 + * + * initial_trace_obj = trace_obj + * if trace_obj.tb_next is None and trace_obj.tb_frame is frame: # <<<<<<<<<<<<<< + * # I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + * pass + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 493, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = (__pyx_t_1 == Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 493, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = (__pyx_t_1 == __pyx_v_frame); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_3; + __pyx_L7_bool_binop_done:; + if (__pyx_t_2) { + goto __pyx_L6; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":498 + * else: + * # Get the trace_obj from where the exception was raised... + * while trace_obj.tb_next is not None: # <<<<<<<<<<<<<< + * trace_obj = trace_obj.tb_next + * + */ + /*else*/ { + while (1) { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 498, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__pyx_t_1 != Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (!__pyx_t_3) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":499 + * # Get the trace_obj from where the exception was raised... 
+ * while trace_obj.tb_next is not None: + * trace_obj = trace_obj.tb_next # <<<<<<<<<<<<<< + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 499, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_trace_obj, __pyx_t_1); + __pyx_t_1 = 0; + } + } + __pyx_L6:; + + /* "_pydevd_bundle/pydevd_cython.pyx":501 + * trace_obj = trace_obj.tb_next + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: # <<<<<<<<<<<<<< + * for check_trace_obj in (initial_trace_obj, trace_obj): + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_ignore_exceptions_thrown_in_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 501, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 501, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":502 + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + * for check_trace_obj in (initial_trace_obj, trace_obj): # <<<<<<<<<<<<<< + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + * absolute_filename = abs_real_path_and_base[0] + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 502, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_initial_trace_obj); + __Pyx_GIVEREF(__pyx_v_initial_trace_obj); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_initial_trace_obj); + __Pyx_INCREF(__pyx_v_trace_obj); + __Pyx_GIVEREF(__pyx_v_trace_obj); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_trace_obj); + __pyx_t_5 = __pyx_t_1; __Pyx_INCREF(__pyx_t_5); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + for (;;) { + if (__pyx_t_6 >= 2) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_6); __Pyx_INCREF(__pyx_t_1); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 502, __pyx_L4_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_5, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 502, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":503 + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + * for check_trace_obj in (initial_trace_obj, trace_obj): + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) # <<<<<<<<<<<<<< + * absolute_filename = abs_real_path_and_base[0] + * canonical_normalized_filename = abs_real_path_and_base[1] + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 503, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_check_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 503, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, 
function); + } + } + __pyx_t_1 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_9, __pyx_t_8) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 503, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 503, __pyx_L4_error) + __Pyx_XDECREF_SET(__pyx_v_abs_real_path_and_base, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":504 + * for check_trace_obj in (initial_trace_obj, trace_obj): + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + * absolute_filename = abs_real_path_and_base[0] # <<<<<<<<<<<<<< + * canonical_normalized_filename = abs_real_path_and_base[1] + * + */ + if (unlikely(__pyx_v_abs_real_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 504, __pyx_L4_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_real_path_and_base, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 504, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 504, __pyx_L4_error) + __Pyx_XDECREF_SET(__pyx_v_absolute_filename, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":505 + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + * absolute_filename = abs_real_path_and_base[0] + * canonical_normalized_filename = abs_real_path_and_base[1] # <<<<<<<<<<<<<< + * + * filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored + */ + if (unlikely(__pyx_v_abs_real_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 505, __pyx_L4_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_real_path_and_base, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 505, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 505, __pyx_L4_error) + __Pyx_XDECREF_SET(__pyx_v_canonical_normalized_filename, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":507 + * canonical_normalized_filename = abs_real_path_and_base[1] + * + * filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored # <<<<<<<<<<<<<< + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_filename_to_lines_where_exceptio); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 507, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 507, __pyx_L4_error) + 
__Pyx_XDECREF_SET(__pyx_v_filename_to_lines_where_exceptions_are_ignored, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":509 + * filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) # <<<<<<<<<<<<<< + * if lines_ignored is None: + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + */ + if (unlikely(__pyx_v_filename_to_lines_where_exceptions_are_ignored == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 509, __pyx_L4_error) + } + __pyx_t_1 = __Pyx_PyDict_GetItemDefault(__pyx_v_filename_to_lines_where_exceptions_are_ignored, __pyx_v_canonical_normalized_filename, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 509, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 509, __pyx_L4_error) + __Pyx_XDECREF_SET(__pyx_v_lines_ignored, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":510 + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + * if lines_ignored is None: # <<<<<<<<<<<<<< + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + * + */ + __pyx_t_3 = (__pyx_v_lines_ignored == ((PyObject*)Py_None)); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":511 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + * if lines_ignored is None: + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} # <<<<<<<<<<<<<< + * + * try: + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 511, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_lines_ignored, __pyx_t_1); + if (unlikely(__pyx_v_filename_to_lines_where_exceptions_are_ignored == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 511, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_filename_to_lines_where_exceptions_are_ignored, __pyx_v_canonical_normalized_filename, __pyx_t_1) < 0)) __PYX_ERR(0, 511, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":510 + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + * if lines_ignored is None: # <<<<<<<<<<<<<< + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":513 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + * + * try: # <<<<<<<<<<<<<< + * curr_stat = os.stat(absolute_filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":514 + * + * try: + * curr_stat = 
os.stat(absolute_filename) # <<<<<<<<<<<<<< + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + * except: + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_os); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 514, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_stat); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 514, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_7, __pyx_v_absolute_filename) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_absolute_filename); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 514, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_curr_stat, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":515 + * try: + * curr_stat = os.stat(absolute_filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) # <<<<<<<<<<<<<< + * except: + * curr_stat = None + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_curr_stat, __pyx_n_s_st_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 515, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_curr_stat, __pyx_n_s_st_mtime); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 515, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 515, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_8); + __pyx_t_1 = 0; + __pyx_t_8 = 0; + __Pyx_DECREF_SET(__pyx_v_curr_stat, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":513 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + * + * try: # <<<<<<<<<<<<<< + * curr_stat = os.stat(absolute_filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + */ + } + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L22_try_end; + __pyx_L15_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":516 + * curr_stat = os.stat(absolute_filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + * except: # <<<<<<<<<<<<<< + * curr_stat = None + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_1) < 0) __PYX_ERR(0, 516, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":517 + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + * except: + * curr_stat = None # <<<<<<<<<<<<<< + * + * last_stat = self.filename_to_stat_info.get(absolute_filename) + */ + __Pyx_INCREF(Py_None); + 
__Pyx_XDECREF_SET(__pyx_v_curr_stat, Py_None); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L16_exception_handled; + } + __pyx_L17_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":513 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + * + * try: # <<<<<<<<<<<<<< + * curr_stat = os.stat(absolute_filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + */ + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + goto __pyx_L4_error; + __pyx_L16_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + __pyx_L22_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":519 + * curr_stat = None + * + * last_stat = self.filename_to_stat_info.get(absolute_filename) # <<<<<<<<<<<<<< + * if last_stat != curr_stat: + * self.filename_to_stat_info[absolute_filename] = curr_stat + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_filename_to_stat_info); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 519, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_get); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 519, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_v_absolute_filename) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_absolute_filename); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 519, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF_SET(__pyx_v_last_stat, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":520 + * + * last_stat = self.filename_to_stat_info.get(absolute_filename) + * if last_stat != curr_stat: # <<<<<<<<<<<<<< + * self.filename_to_stat_info[absolute_filename] = curr_stat + * lines_ignored.clear() + */ + __pyx_t_1 = PyObject_RichCompare(__pyx_v_last_stat, __pyx_v_curr_stat, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 520, __pyx_L4_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 520, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":521 + * last_stat = self.filename_to_stat_info.get(absolute_filename) + * if last_stat != curr_stat: + * self.filename_to_stat_info[absolute_filename] = curr_stat # <<<<<<<<<<<<<< + * lines_ignored.clear() + * try: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_filename_to_stat_info); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 521, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(PyObject_SetItem(__pyx_t_1, __pyx_v_absolute_filename, __pyx_v_curr_stat) < 0)) __PYX_ERR(0, 521, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":522 + * if last_stat != curr_stat: + * self.filename_to_stat_info[absolute_filename] = curr_stat + * lines_ignored.clear() # <<<<<<<<<<<<<< + * try: + * linecache.checkcache(absolute_filename) + */ + if (unlikely(__pyx_v_lines_ignored == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "clear"); + __PYX_ERR(0, 522, __pyx_L4_error) + } + __pyx_t_13 = __Pyx_PyDict_Clear(__pyx_v_lines_ignored); if (unlikely(__pyx_t_13 == ((int)-1))) __PYX_ERR(0, 522, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":523 + * self.filename_to_stat_info[absolute_filename] = curr_stat + * lines_ignored.clear() + * try: # <<<<<<<<<<<<<< + * linecache.checkcache(absolute_filename) + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_12, &__pyx_t_11, &__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_10); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":524 + * lines_ignored.clear() + * try: + * linecache.checkcache(absolute_filename) # <<<<<<<<<<<<<< + * except: + * pydev_log.exception('Error in linecache.checkcache(%r)', absolute_filename) + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_linecache); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 524, __pyx_L26_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_checkcache); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 524, __pyx_L26_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_7, __pyx_v_absolute_filename) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_absolute_filename); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 524, __pyx_L26_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":523 + * self.filename_to_stat_info[absolute_filename] = curr_stat + * lines_ignored.clear() + * try: # <<<<<<<<<<<<<< + * linecache.checkcache(absolute_filename) + * except: + */ + } + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L33_try_end; + __pyx_L26_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":525 + * try: + * linecache.checkcache(absolute_filename) + * except: # <<<<<<<<<<<<<< + * pydev_log.exception('Error in linecache.checkcache(%r)', absolute_filename) + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_8, &__pyx_t_7) < 0) __PYX_ERR(0, 525, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":526 + * linecache.checkcache(absolute_filename) + * except: + * pydev_log.exception('Error in linecache.checkcache(%r)', absolute_filename) # <<<<<<<<<<<<<< + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + */ + __Pyx_GetModuleGlobalName(__pyx_t_14, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 526, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_exception); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 526, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + __pyx_t_16 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + __pyx_t_16 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[3] = {__pyx_t_14, __pyx_kp_s_Error_in_linecache_checkcache_r, __pyx_v_absolute_filename}; + __pyx_t_9 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_16, 2+__pyx_t_16); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 526, __pyx_L28_except_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[3] = {__pyx_t_14, __pyx_kp_s_Error_in_linecache_checkcache_r, __pyx_v_absolute_filename}; + __pyx_t_9 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_16, 2+__pyx_t_16); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 526, __pyx_L28_except_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + { + __pyx_t_17 = PyTuple_New(2+__pyx_t_16); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 526, __pyx_L28_except_error) + 
__Pyx_GOTREF(__pyx_t_17); + if (__pyx_t_14) { + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_t_14); __pyx_t_14 = NULL; + } + __Pyx_INCREF(__pyx_kp_s_Error_in_linecache_checkcache_r); + __Pyx_GIVEREF(__pyx_kp_s_Error_in_linecache_checkcache_r); + PyTuple_SET_ITEM(__pyx_t_17, 0+__pyx_t_16, __pyx_kp_s_Error_in_linecache_checkcache_r); + __Pyx_INCREF(__pyx_v_absolute_filename); + __Pyx_GIVEREF(__pyx_v_absolute_filename); + PyTuple_SET_ITEM(__pyx_t_17, 1+__pyx_t_16, __pyx_v_absolute_filename); + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_17, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 526, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L27_exception_handled; + } + __pyx_L28_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":523 + * self.filename_to_stat_info[absolute_filename] = curr_stat + * lines_ignored.clear() + * try: # <<<<<<<<<<<<<< + * linecache.checkcache(absolute_filename) + * except: + */ + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_11, __pyx_t_10); + goto __pyx_L4_error; + __pyx_L27_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_11, __pyx_t_10); + __pyx_L33_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":520 + * + * last_stat = self.filename_to_stat_info.get(absolute_filename) + * if last_stat != curr_stat: # <<<<<<<<<<<<<< + * self.filename_to_stat_info[absolute_filename] = curr_stat + * lines_ignored.clear() + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":528 + * pydev_log.exception('Error in linecache.checkcache(%r)', absolute_filename) + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) # <<<<<<<<<<<<<< + * if from_user_input: + * merged = {} + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_filename_to_lines_where_exceptio); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 528, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 528, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_7 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_8, __pyx_v_canonical_normalized_filename) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_canonical_normalized_filename); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 528, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_from_user_input, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":529 + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + * if from_user_input: # <<<<<<<<<<<<<< + * merged = {} + * merged.update(lines_ignored) + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_from_user_input); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 529, __pyx_L4_error) + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":530 + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + * if from_user_input: + * merged = {} # <<<<<<<<<<<<<< + * merged.update(lines_ignored) + * # Override what we have with the related entries that the user entered + */ + __pyx_t_7 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 530, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_XDECREF_SET(__pyx_v_merged, ((PyObject*)__pyx_t_7)); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":531 + * if from_user_input: + * merged = {} + * merged.update(lines_ignored) # <<<<<<<<<<<<<< + * # Override what we have with the related entries that the user entered + * merged.update(from_user_input) + */ + __pyx_t_7 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_update, __pyx_v_merged, __pyx_v_lines_ignored); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 531, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":533 + * merged.update(lines_ignored) + * # Override what we have with the related entries that the user entered + * merged.update(from_user_input) # <<<<<<<<<<<<<< + * else: + * merged = lines_ignored + */ + __pyx_t_7 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_update, __pyx_v_merged, __pyx_v_from_user_input); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 533, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":529 + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + * if from_user_input: # <<<<<<<<<<<<<< + * merged = {} + * merged.update(lines_ignored) + */ + goto __pyx_L36; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":535 + * merged.update(from_user_input) + * else: + * merged = lines_ignored # <<<<<<<<<<<<<< + * + * exc_lineno = check_trace_obj.tb_lineno + */ + /*else*/ { + __Pyx_INCREF(__pyx_v_lines_ignored); + __Pyx_XDECREF_SET(__pyx_v_merged, __pyx_v_lines_ignored); + } + __pyx_L36:; + + /* "_pydevd_bundle/pydevd_cython.pyx":537 + * merged = lines_ignored + * + * exc_lineno = check_trace_obj.tb_lineno # <<<<<<<<<<<<<< + * + * # print ('lines ignored', lines_ignored) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_check_trace_obj, __pyx_n_s_tb_lineno); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 537, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_XDECREF_SET(__pyx_v_exc_lineno, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":543 + * # print ('merged', merged, 'curr', exc_lineno) + * + * if exc_lineno not in merged: # Note: check on merged but 
update lines_ignored. # <<<<<<<<<<<<<< + * try: + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + */ + if (unlikely(__pyx_v_merged == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 543, __pyx_L4_error) + } + __pyx_t_2 = (__Pyx_PyDict_ContainsTF(__pyx_v_exc_lineno, __pyx_v_merged, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 543, __pyx_L4_error) + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":544 + * + * if exc_lineno not in merged: # Note: check on merged but update lines_ignored. + * try: # <<<<<<<<<<<<<< + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":545 + * if exc_lineno not in merged: # Note: check on merged but update lines_ignored. + * try: + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) # <<<<<<<<<<<<<< + * except: + * pydev_log.exception('Error in linecache.getline(%r, %s, f_globals)', absolute_filename, exc_lineno) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_linecache); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_getline); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_check_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_f_globals); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + __pyx_t_16 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_16 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_absolute_filename, __pyx_v_exc_lineno, __pyx_t_9}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_16, 3+__pyx_t_16); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_absolute_filename, __pyx_v_exc_lineno, __pyx_t_9}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_16, 3+__pyx_t_16); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + { + __pyx_t_15 = PyTuple_New(3+__pyx_t_16); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_GOTREF(__pyx_t_15); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); 
PyTuple_SET_ITEM(__pyx_t_15, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_absolute_filename); + __Pyx_GIVEREF(__pyx_v_absolute_filename); + PyTuple_SET_ITEM(__pyx_t_15, 0+__pyx_t_16, __pyx_v_absolute_filename); + __Pyx_INCREF(__pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_v_exc_lineno); + PyTuple_SET_ITEM(__pyx_t_15, 1+__pyx_t_16, __pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_15, 2+__pyx_t_16, __pyx_t_9); + __pyx_t_9 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_15, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 545, __pyx_L38_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_line, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":544 + * + * if exc_lineno not in merged: # Note: check on merged but update lines_ignored. + * try: # <<<<<<<<<<<<<< + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + */ + } + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L45_try_end; + __pyx_L38_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":546 + * try: + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: # <<<<<<<<<<<<<< + * pydev_log.exception('Error in linecache.getline(%r, %s, f_globals)', absolute_filename, exc_lineno) + * line = '' + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_15) < 0) __PYX_ERR(0, 546, __pyx_L40_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_15); + + /* "_pydevd_bundle/pydevd_cython.pyx":547 + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + * pydev_log.exception('Error in linecache.getline(%r, %s, f_globals)', absolute_filename, exc_lineno) # <<<<<<<<<<<<<< + * line = '' + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 547, __pyx_L40_except_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_exception); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 547, __pyx_L40_except_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + __pyx_t_16 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_17))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_17); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_17); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_17, function); + __pyx_t_16 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_17)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_kp_s_Error_in_linecache_getline_r_s_f, __pyx_v_absolute_filename, __pyx_v_exc_lineno}; + __pyx_t_9 = __Pyx_PyFunction_FastCall(__pyx_t_17, __pyx_temp+1-__pyx_t_16, 3+__pyx_t_16); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 547, 
__pyx_L40_except_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_17)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_kp_s_Error_in_linecache_getline_r_s_f, __pyx_v_absolute_filename, __pyx_v_exc_lineno}; + __pyx_t_9 = __Pyx_PyCFunction_FastCall(__pyx_t_17, __pyx_temp+1-__pyx_t_16, 3+__pyx_t_16); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 547, __pyx_L40_except_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + { + __pyx_t_14 = PyTuple_New(3+__pyx_t_16); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 547, __pyx_L40_except_error) + __Pyx_GOTREF(__pyx_t_14); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_kp_s_Error_in_linecache_getline_r_s_f); + __Pyx_GIVEREF(__pyx_kp_s_Error_in_linecache_getline_r_s_f); + PyTuple_SET_ITEM(__pyx_t_14, 0+__pyx_t_16, __pyx_kp_s_Error_in_linecache_getline_r_s_f); + __Pyx_INCREF(__pyx_v_absolute_filename); + __Pyx_GIVEREF(__pyx_v_absolute_filename); + PyTuple_SET_ITEM(__pyx_t_14, 1+__pyx_t_16, __pyx_v_absolute_filename); + __Pyx_INCREF(__pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_v_exc_lineno); + PyTuple_SET_ITEM(__pyx_t_14, 2+__pyx_t_16, __pyx_v_exc_lineno); + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_17, __pyx_t_14, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 547, __pyx_L40_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":548 + * except: + * pydev_log.exception('Error in linecache.getline(%r, %s, f_globals)', absolute_filename, exc_lineno) + * line = '' # <<<<<<<<<<<<<< + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_XDECREF_SET(__pyx_v_line, __pyx_kp_s_); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + goto __pyx_L39_exception_handled; + } + __pyx_L40_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":544 + * + * if exc_lineno not in merged: # Note: check on merged but update lines_ignored. 
+ * try: # <<<<<<<<<<<<<< + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + */ + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + goto __pyx_L4_error; + __pyx_L39_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + __pyx_L45_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":550 + * line = '' + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: # <<<<<<<<<<<<<< + * lines_ignored[exc_lineno] = 1 + * return False + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_IGNORE_EXCEPTION_TAG); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 550, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_match); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 550, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_15 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_v_line) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_line); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 550, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_3 = (__pyx_t_15 != Py_None); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":551 + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: + * lines_ignored[exc_lineno] = 1 # <<<<<<<<<<<<<< + * return False + * else: + */ + if (unlikely(__pyx_v_lines_ignored == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 551, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_lines_ignored, __pyx_v_exc_lineno, __pyx_int_1) < 0)) __PYX_ERR(0, 551, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":552 + * if IGNORE_EXCEPTION_TAG.match(line) is not None: + * lines_ignored[exc_lineno] = 1 + * return False # <<<<<<<<<<<<<< + * else: + * # Put in the cache saying not to ignore + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":550 + * line = '' + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: # <<<<<<<<<<<<<< + * lines_ignored[exc_lineno] = 1 + * return False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":555 + * else: + * # Put in the cache saying not to ignore + * lines_ignored[exc_lineno] = 0 # <<<<<<<<<<<<<< + * else: + * # Ok, dict has it already cached, so, let's check it... 
+ */ + /*else*/ { + if (unlikely(__pyx_v_lines_ignored == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 555, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_lines_ignored, __pyx_v_exc_lineno, __pyx_int_0) < 0)) __PYX_ERR(0, 555, __pyx_L4_error) + } + + /* "_pydevd_bundle/pydevd_cython.pyx":543 + * # print ('merged', merged, 'curr', exc_lineno) + * + * if exc_lineno not in merged: # Note: check on merged but update lines_ignored. # <<<<<<<<<<<<<< + * try: + * line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + */ + goto __pyx_L37; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":558 + * else: + * # Ok, dict has it already cached, so, let's check it... + * if merged.get(exc_lineno, 0): # <<<<<<<<<<<<<< + * return False + * + */ + /*else*/ { + if (unlikely(__pyx_v_merged == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 558, __pyx_L4_error) + } + __pyx_t_15 = __Pyx_PyDict_GetItemDefault(__pyx_v_merged, __pyx_v_exc_lineno, __pyx_int_0); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 558, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_15); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 558, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":559 + * # Ok, dict has it already cached, so, let's check it... + * if merged.get(exc_lineno, 0): + * return False # <<<<<<<<<<<<<< + * + * thread = self._args[3] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":558 + * else: + * # Ok, dict has it already cached, so, let's check it... 
+ * if merged.get(exc_lineno, 0): # <<<<<<<<<<<<<< + * return False + * + */ + } + } + __pyx_L37:; + + /* "_pydevd_bundle/pydevd_cython.pyx":502 + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + * for check_trace_obj in (initial_trace_obj, trace_obj): # <<<<<<<<<<<<<< + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + * absolute_filename = abs_real_path_and_base[0] + */ + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":501 + * trace_obj = trace_obj.tb_next + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: # <<<<<<<<<<<<<< + * for check_trace_obj in (initial_trace_obj, trace_obj): + * abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":561 + * return False + * + * thread = self._args[3] # <<<<<<<<<<<<<< + * + * try: + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 561, __pyx_L4_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 561, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_thread = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":563 + * thread = self._args[3] + * + * try: # <<<<<<<<<<<<<< + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_12, &__pyx_t_11, &__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_10); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":564 + * + * try: + * frame_id_to_frame = {} # <<<<<<<<<<<<<< + * frame_id_to_frame[id(frame)] = frame + * f = trace_obj.tb_frame + */ + __pyx_t_5 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 564, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_frame_id_to_frame = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":565 + * try: + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame # <<<<<<<<<<<<<< + * f = trace_obj.tb_frame + * while f is not None: + */ + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, __pyx_v_frame); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 565, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(PyDict_SetItem(__pyx_v_frame_id_to_frame, __pyx_t_5, __pyx_v_frame) < 0)) __PYX_ERR(0, 565, __pyx_L50_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":566 + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + * f = trace_obj.tb_frame # <<<<<<<<<<<<<< + * while f is not None: + * frame_id_to_frame[id(f)] = f + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 566, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_f = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":567 + * frame_id_to_frame[id(frame)] = frame + * f = trace_obj.tb_frame + * while f is not None: # <<<<<<<<<<<<<< + * frame_id_to_frame[id(f)] = f + * f = f.f_back + */ + while (1) { + __pyx_t_2 = (__pyx_v_f != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (!__pyx_t_3) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":568 + * f = trace_obj.tb_frame + * while f is not None: + * 
frame_id_to_frame[id(f)] = f # <<<<<<<<<<<<<< + * f = f.f_back + * f = None + */ + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, __pyx_v_f); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 568, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(PyDict_SetItem(__pyx_v_frame_id_to_frame, __pyx_t_5, __pyx_v_f) < 0)) __PYX_ERR(0, 568, __pyx_L50_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":569 + * while f is not None: + * frame_id_to_frame[id(f)] = f + * f = f.f_back # <<<<<<<<<<<<<< + * f = None + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 569, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_5); + __pyx_t_5 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":570 + * frame_id_to_frame[id(f)] = f + * f = f.f_back + * f = None # <<<<<<<<<<<<<< + * + * stopped = True + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":572 + * f = None + * + * stopped = True # <<<<<<<<<<<<<< + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + * try: + */ + __pyx_v_stopped = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":573 + * + * stopped = True + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) # <<<<<<<<<<<<<< + * try: + * self.set_suspend(thread, 137) + */ + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_send_caught_exception_stack); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 573, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, __pyx_v_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 573, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = NULL; + __pyx_t_16 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + __pyx_t_16 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_v_thread, __pyx_v_arg, __pyx_t_7}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_16, 3+__pyx_t_16); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 573, __pyx_L50_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_v_thread, __pyx_v_arg, __pyx_t_7}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_16, 3+__pyx_t_16); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 573, __pyx_L50_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_9 = PyTuple_New(3+__pyx_t_16); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 573, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_9); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_16, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_16, __pyx_v_arg); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_9, 2+__pyx_t_16, 
__pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_9, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 573, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":574 + * stopped = True + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + * try: # <<<<<<<<<<<<<< + * self.set_suspend(thread, 137) + * self.do_wait_suspend(thread, frame, event, arg, exception_type=exception_type) + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":575 + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + * try: + * self.set_suspend(thread, 137) # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg, exception_type=exception_type) + * finally: + */ + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 575, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_9 = NULL; + __pyx_t_16 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + __pyx_t_16 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_v_thread, __pyx_int_137}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_16, 2+__pyx_t_16); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 575, __pyx_L59_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_v_thread, __pyx_int_137}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_16, 2+__pyx_t_16); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 575, __pyx_L59_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_16); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 575, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_16, __pyx_v_thread); + __Pyx_INCREF(__pyx_int_137); + __Pyx_GIVEREF(__pyx_int_137); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_16, __pyx_int_137); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_7, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 575, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":576 + * try: + * self.set_suspend(thread, 137) + * self.do_wait_suspend(thread, frame, event, arg, exception_type=exception_type) # <<<<<<<<<<<<<< + * finally: + * main_debugger.send_caught_exception_stack_proceeded(thread) + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 576, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_15 = PyTuple_New(4); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 576, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_15); + 
__Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_15, 0, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_15, 1, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_15, 2, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_15, 3, __pyx_v_arg); + __pyx_t_7 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 576, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_7, __pyx_n_s_exception_type, __pyx_v_exception_type) < 0) __PYX_ERR(0, 576, __pyx_L59_error) + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_15, __pyx_t_7); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 576, __pyx_L59_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":578 + * self.do_wait_suspend(thread, frame, event, arg, exception_type=exception_type) + * finally: + * main_debugger.send_caught_exception_stack_proceeded(thread) # <<<<<<<<<<<<<< + * except: + * pydev_log.exception() + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_send_caught_exception_stack_proc); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 578, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_15 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_15)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_15); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_9 = (__pyx_t_15) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_15, __pyx_v_thread) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_thread); + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 578, __pyx_L50_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L60; + } + __pyx_L59_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_23, &__pyx_t_24, &__pyx_t_25); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_20, &__pyx_t_21, &__pyx_t_22) < 0)) __Pyx_ErrFetch(&__pyx_t_20, &__pyx_t_21, &__pyx_t_22); + __Pyx_XGOTREF(__pyx_t_20); + __Pyx_XGOTREF(__pyx_t_21); + __Pyx_XGOTREF(__pyx_t_22); + __Pyx_XGOTREF(__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_25); + __pyx_t_16 = __pyx_lineno; __pyx_t_18 = __pyx_clineno; __pyx_t_19 = __pyx_filename; + { + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_send_caught_exception_stack_proc); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 578, __pyx_L62_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_15 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_15)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_15); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_9 = (__pyx_t_15) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_15, __pyx_v_thread) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_thread); + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 578, __pyx_L62_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_ExceptionReset(__pyx_t_23, __pyx_t_24, __pyx_t_25); + } + __Pyx_XGIVEREF(__pyx_t_20); + __Pyx_XGIVEREF(__pyx_t_21); + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_ErrRestore(__pyx_t_20, __pyx_t_21, __pyx_t_22); + __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; + __pyx_lineno = __pyx_t_16; __pyx_clineno = __pyx_t_18; __pyx_filename = __pyx_t_19; + goto __pyx_L50_error; + __pyx_L62_error:; + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_ExceptionReset(__pyx_t_23, __pyx_t_24, __pyx_t_25); + } + __Pyx_XDECREF(__pyx_t_20); __pyx_t_20 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_22); __pyx_t_22 = 0; + __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; + goto __pyx_L50_error; + } + __pyx_L60:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":563 + * thread = self._args[3] + * + * try: # <<<<<<<<<<<<<< + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + */ + } + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L55_try_end; + __pyx_L50_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":579 + * finally: + * main_debugger.send_caught_exception_stack_proceeded(thread) + * except: # <<<<<<<<<<<<<< + * pydev_log.exception() + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_7, &__pyx_t_15) < 0) __PYX_ERR(0, 579, __pyx_L52_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_15); + + /* "_pydevd_bundle/pydevd_cython.pyx":580 + * main_debugger.send_caught_exception_stack_proceeded(thread) + * except: + * pydev_log.exception() # <<<<<<<<<<<<<< + * + * main_debugger.set_trace_for_frame_and_parents(frame) + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 580, __pyx_L52_except_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_exception); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 580, __pyx_L52_except_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_17))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_17); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_17); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_17, function); + } + } + __pyx_t_5 = (__pyx_t_8) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_17, __pyx_t_8) : __Pyx_PyObject_CallNoArg(__pyx_t_17); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 580, __pyx_L52_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + goto __pyx_L51_exception_handled; + } + __pyx_L52_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":563 + * thread = self._args[3] + * + * try: # <<<<<<<<<<<<<< + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + */ + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_11, __pyx_t_10); + goto __pyx_L4_error; + __pyx_L51_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_11, __pyx_t_10); + __pyx_L55_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":582 + * pydev_log.exception() + * + * main_debugger.set_trace_for_frame_and_parents(frame) # <<<<<<<<<<<<<< + * finally: + * # Make sure the user cannot see the '__exception__' we added after we leave the suspend state. + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_set_trace_for_frame_and_parents); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 582, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_15 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_9, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 582, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":585 + * finally: + * # Make sure the user cannot see the '__exception__' we added after we leave the suspend state. + * remove_exception_from_frame(frame) # <<<<<<<<<<<<<< + * # Clear some local variables... + * frame = None + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_remove_exception_from_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_15 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_9, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":587 + * remove_exception_from_frame(frame) + * # Clear some local variables... 
+ * frame = None # <<<<<<<<<<<<<< + * trace_obj = None + * initial_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_frame, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":588 + * # Clear some local variables... + * frame = None + * trace_obj = None # <<<<<<<<<<<<<< + * initial_trace_obj = None + * check_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":589 + * frame = None + * trace_obj = None + * initial_trace_obj = None # <<<<<<<<<<<<<< + * check_trace_obj = None + * f = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_initial_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":590 + * trace_obj = None + * initial_trace_obj = None + * check_trace_obj = None # <<<<<<<<<<<<<< + * f = None + * frame_id_to_frame = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":591 + * initial_trace_obj = None + * check_trace_obj = None + * f = None # <<<<<<<<<<<<<< + * frame_id_to_frame = None + * main_debugger = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":592 + * check_trace_obj = None + * f = None + * frame_id_to_frame = None # <<<<<<<<<<<<<< + * main_debugger = None + * thread = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_frame_id_to_frame, ((PyObject*)Py_None)); + + /* "_pydevd_bundle/pydevd_cython.pyx":593 + * f = None + * frame_id_to_frame = None + * main_debugger = None # <<<<<<<<<<<<<< + * thread = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_main_debugger, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":594 + * frame_id_to_frame = None + * main_debugger = None + * thread = None # <<<<<<<<<<<<<< + * + * return stopped + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_thread, Py_None); + goto __pyx_L5; + } + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_25 = 0; __pyx_t_24 = 0; __pyx_t_23 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_25, &__pyx_t_24, &__pyx_t_23); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_23); + __pyx_t_18 = __pyx_lineno; __pyx_t_16 = __pyx_clineno; __pyx_t_26 = __pyx_filename; + { + + /* "_pydevd_bundle/pydevd_cython.pyx":585 + * finally: + * # Make sure the user cannot see the '__exception__' we added after we leave the suspend state. + * remove_exception_from_frame(frame) # <<<<<<<<<<<<<< + * # Clear some local variables... 
+ * frame = None + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_remove_exception_from_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 585, __pyx_L66_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_15 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_9, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 585, __pyx_L66_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":587 + * remove_exception_from_frame(frame) + * # Clear some local variables... + * frame = None # <<<<<<<<<<<<<< + * trace_obj = None + * initial_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_frame, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":588 + * # Clear some local variables... + * frame = None + * trace_obj = None # <<<<<<<<<<<<<< + * initial_trace_obj = None + * check_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":589 + * frame = None + * trace_obj = None + * initial_trace_obj = None # <<<<<<<<<<<<<< + * check_trace_obj = None + * f = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_initial_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":590 + * trace_obj = None + * initial_trace_obj = None + * check_trace_obj = None # <<<<<<<<<<<<<< + * f = None + * frame_id_to_frame = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":591 + * initial_trace_obj = None + * check_trace_obj = None + * f = None # <<<<<<<<<<<<<< + * frame_id_to_frame = None + * main_debugger = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":592 + * check_trace_obj = None + * f = None + * frame_id_to_frame = None # <<<<<<<<<<<<<< + * main_debugger = None + * thread = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_frame_id_to_frame, ((PyObject*)Py_None)); + + /* "_pydevd_bundle/pydevd_cython.pyx":593 + * f = None + * frame_id_to_frame = None + * main_debugger = None # <<<<<<<<<<<<<< + * thread = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_main_debugger, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":594 + * frame_id_to_frame = None + * main_debugger = None + * thread = None # <<<<<<<<<<<<<< + * + * return stopped + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_thread, Py_None); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_ExceptionReset(__pyx_t_25, __pyx_t_24, __pyx_t_23); + } + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ErrRestore(__pyx_t_10, __pyx_t_11, __pyx_t_12); + __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_25 = 0; __pyx_t_24 = 0; __pyx_t_23 = 0; + __pyx_lineno = __pyx_t_18; __pyx_clineno = __pyx_t_16; __pyx_filename = __pyx_t_26; + goto __pyx_L1_error; + __pyx_L66_error:; + if (PY_MAJOR_VERSION >= 3) { + 
__Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_ExceptionReset(__pyx_t_25, __pyx_t_24, __pyx_t_23); + } + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_25 = 0; __pyx_t_24 = 0; __pyx_t_23 = 0; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_23 = __pyx_r; + __pyx_r = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":585 + * finally: + * # Make sure the user cannot see the '__exception__' we added after we leave the suspend state. + * remove_exception_from_frame(frame) # <<<<<<<<<<<<<< + * # Clear some local variables... + * frame = None + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_remove_exception_from_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_15 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_9, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":587 + * remove_exception_from_frame(frame) + * # Clear some local variables... + * frame = None # <<<<<<<<<<<<<< + * trace_obj = None + * initial_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_frame, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":588 + * # Clear some local variables... 
+ * frame = None + * trace_obj = None # <<<<<<<<<<<<<< + * initial_trace_obj = None + * check_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":589 + * frame = None + * trace_obj = None + * initial_trace_obj = None # <<<<<<<<<<<<<< + * check_trace_obj = None + * f = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_initial_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":590 + * trace_obj = None + * initial_trace_obj = None + * check_trace_obj = None # <<<<<<<<<<<<<< + * f = None + * frame_id_to_frame = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":591 + * initial_trace_obj = None + * check_trace_obj = None + * f = None # <<<<<<<<<<<<<< + * frame_id_to_frame = None + * main_debugger = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":592 + * check_trace_obj = None + * f = None + * frame_id_to_frame = None # <<<<<<<<<<<<<< + * main_debugger = None + * thread = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_frame_id_to_frame, ((PyObject*)Py_None)); + + /* "_pydevd_bundle/pydevd_cython.pyx":593 + * f = None + * frame_id_to_frame = None + * main_debugger = None # <<<<<<<<<<<<<< + * thread = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_main_debugger, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":594 + * frame_id_to_frame = None + * main_debugger = None + * thread = None # <<<<<<<<<<<<<< + * + * return stopped + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_thread, Py_None); + __pyx_r = __pyx_t_23; + __pyx_t_23 = 0; + goto __pyx_L0; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":596 + * thread = None + * + * return stopped # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_15 = __Pyx_PyBool_FromLong(__pyx_v_stopped); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 596, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_r = __pyx_t_15; + __pyx_t_15 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":470 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _handle_exception(self, frame, str event, arg, str exception_type): # <<<<<<<<<<<<<< + * cdef bint stopped; + * cdef tuple abs_real_path_and_base; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_15); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_abs_real_path_and_base); + __Pyx_XDECREF(__pyx_v_absolute_filename); + __Pyx_XDECREF(__pyx_v_canonical_normalized_filename); + __Pyx_XDECREF(__pyx_v_filename_to_lines_where_exceptions_are_ignored); + __Pyx_XDECREF(__pyx_v_lines_ignored); + __Pyx_XDECREF(__pyx_v_frame_id_to_frame); + __Pyx_XDECREF(__pyx_v_merged); + __Pyx_XDECREF(__pyx_v_trace_obj); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_initial_trace_obj); + __Pyx_XDECREF(__pyx_v_check_trace_obj); + __Pyx_XDECREF(__pyx_v_curr_stat); + __Pyx_XDECREF(__pyx_v_last_stat); + __Pyx_XDECREF(__pyx_v_from_user_input); + 
__Pyx_XDECREF(__pyx_v_exc_lineno); + __Pyx_XDECREF(__pyx_v_line); + __Pyx_XDECREF(__pyx_v_thread); + __Pyx_XDECREF(__pyx_v_f); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":599 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef get_func_name(self, frame): # <<<<<<<<<<<<<< + * cdef str func_name + * # ELSE + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_get_func_name(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_func_name = 0; + PyObject *__pyx_v_code_obj = NULL; + PyObject *__pyx_v_cls_name = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_func_name", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":604 + * # def get_func_name(self, frame): + * # ENDIF + * code_obj = frame.f_code # <<<<<<<<<<<<<< + * func_name = code_obj.co_name + * try: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 604, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_code_obj = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":605 + * # ENDIF + * code_obj = frame.f_code + * func_name = code_obj.co_name # <<<<<<<<<<<<<< + * try: + * cls_name = get_clsname_for_code(code_obj, frame) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_code_obj, __pyx_n_s_co_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 605, __pyx_L1_error) + __pyx_v_func_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":606 + * code_obj = frame.f_code + * func_name = code_obj.co_name + * try: # <<<<<<<<<<<<<< + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":607 + * func_name = code_obj.co_name + * try: + * cls_name = get_clsname_for_code(code_obj, frame) # <<<<<<<<<<<<<< + * if cls_name is not None: + * return "%s.%s" % (cls_name, func_name) + */ + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_get_clsname_for_code); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 607, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if 
(PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_code_obj, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 607, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_code_obj, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 607, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 607, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_code_obj); + __Pyx_GIVEREF(__pyx_v_code_obj); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_v_code_obj); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 607, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_cls_name = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":608 + * try: + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: # <<<<<<<<<<<<<< + * return "%s.%s" % (cls_name, func_name) + * else: + */ + __pyx_t_9 = (__pyx_v_cls_name != Py_None); + __pyx_t_10 = (__pyx_t_9 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":609 + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + * return "%s.%s" % (cls_name, func_name) # <<<<<<<<<<<<<< + * else: + * return func_name + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 609, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_cls_name); + __Pyx_GIVEREF(__pyx_v_cls_name); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_cls_name); + __Pyx_INCREF(__pyx_v_func_name); + __Pyx_GIVEREF(__pyx_v_func_name); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_func_name); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_s_s, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 609, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":608 + * try: + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: # <<<<<<<<<<<<<< + * return "%s.%s" % (cls_name, func_name) + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":611 + * return "%s.%s" % (cls_name, func_name) + * else: + * return func_name # <<<<<<<<<<<<<< + * except: + * pydev_log.exception() + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_func_name); + __pyx_r = __pyx_v_func_name; + goto __pyx_L7_try_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":606 + * code_obj = frame.f_code + * func_name = code_obj.co_name + * try: # <<<<<<<<<<<<<< + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + */ + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 
0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":612 + * else: + * return func_name + * except: # <<<<<<<<<<<<<< + * pydev_log.exception() + * return func_name + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.get_func_name", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_8) < 0) __PYX_ERR(0, 612, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_8); + + /* "_pydevd_bundle/pydevd_cython.pyx":613 + * return func_name + * except: + * pydev_log.exception() # <<<<<<<<<<<<<< + * return func_name + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_11, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 613, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_exception); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 613, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + __pyx_t_6 = (__pyx_t_11) ? __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_11) : __Pyx_PyObject_CallNoArg(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 613, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":614 + * except: + * pydev_log.exception() + * return func_name # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_func_name); + __pyx_r = __pyx_v_func_name; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L6_except_return; + } + __pyx_L5_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":606 + * code_obj = frame.f_code + * func_name = code_obj.co_name + * try: # <<<<<<<<<<<<<< + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L7_try_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":599 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef get_func_name(self, frame): # <<<<<<<<<<<<<< + * cdef str func_name + * # ELSE + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.get_func_name", __pyx_clineno, 
__pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_func_name); + __Pyx_XDECREF(__pyx_v_code_obj); + __Pyx_XDECREF(__pyx_v_cls_name); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":617 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _show_return_values(self, frame, arg): # <<<<<<<<<<<<<< + * # ELSE + * # def _show_return_values(self, frame, arg): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__show_return_values(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_f_locals_back = NULL; + PyObject *__pyx_v_return_values_dict = NULL; + PyObject *__pyx_v_name = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + char const *__pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_show_return_values", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":621 + * # def _show_return_values(self, frame, arg): + * # ENDIF + * try: # <<<<<<<<<<<<<< + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":622 + * # ENDIF + * try: + * try: # <<<<<<<<<<<<<< + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":623 + * try: + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) # <<<<<<<<<<<<<< + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 623, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_GetAttr3(__pyx_t_4, __pyx_n_s_f_locals, Py_None); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 623, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_f_locals_back = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":624 + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: + */ + __pyx_t_6 = (__pyx_v_f_locals_back != Py_None); + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":625 + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) # <<<<<<<<<<<<<< + * if return_values_dict is None: + * return_values_dict = {} + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_locals_back, __pyx_n_s_get); if 
(unlikely(!__pyx_t_4)) __PYX_ERR(0, 625, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 625, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_t_8, Py_None}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 625, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_t_8, Py_None}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 625, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + { + __pyx_t_11 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 625, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_11); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_t_8); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, Py_None); + __pyx_t_8 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_11, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 625, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_return_values_dict = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":626 + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: # <<<<<<<<<<<<<< + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + */ + __pyx_t_7 = (__pyx_v_return_values_dict == Py_None); + __pyx_t_6 = (__pyx_t_7 != 0); + if (__pyx_t_6) { + + /* "_pydevd_bundle/pydevd_cython.pyx":627 + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: + * return_values_dict = {} # <<<<<<<<<<<<<< + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + * name = self.get_func_name(frame) + */ + __pyx_t_5 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 627, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF_SET(__pyx_v_return_values_dict, __pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":628 + * if return_values_dict is None: + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict # <<<<<<<<<<<<<< + * name = self.get_func_name(frame) + * return_values_dict[name] = arg + */ + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 628, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(PyObject_SetItem(__pyx_v_f_locals_back, 
__pyx_t_5, __pyx_v_return_values_dict) < 0)) __PYX_ERR(0, 628, __pyx_L6_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":626 + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: # <<<<<<<<<<<<<< + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":629 + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + * name = self.get_func_name(frame) # <<<<<<<<<<<<<< + * return_values_dict[name] = arg + * except: + */ + __pyx_t_5 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->get_func_name(__pyx_v_self, __pyx_v_frame); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 629, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_name = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":630 + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + * name = self.get_func_name(frame) + * return_values_dict[name] = arg # <<<<<<<<<<<<<< + * except: + * pydev_log.exception() + */ + if (unlikely(PyObject_SetItem(__pyx_v_return_values_dict, __pyx_v_name, __pyx_v_arg) < 0)) __PYX_ERR(0, 630, __pyx_L6_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":624 + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":622 + * # ENDIF + * try: + * try: # <<<<<<<<<<<<<< + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L11_try_end; + __pyx_L6_error:; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":631 + * name = self.get_func_name(frame) + * return_values_dict[name] = arg + * except: # <<<<<<<<<<<<<< + * pydev_log.exception() + * finally: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._show_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_4, &__pyx_t_11) < 0) __PYX_ERR(0, 631, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_11); + + /* "_pydevd_bundle/pydevd_cython.pyx":632 + * return_values_dict[name] = arg + * except: + * pydev_log.exception() # <<<<<<<<<<<<<< + * finally: + * f_locals_back = None + */ + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 632, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_exception); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 632, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, 
function); + } + } + __pyx_t_8 = (__pyx_t_9) ? __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_9) : __Pyx_PyObject_CallNoArg(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 632, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L7_exception_handled; + } + __pyx_L8_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":622 + * # ENDIF + * try: + * try: # <<<<<<<<<<<<<< + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L4_error; + __pyx_L7_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L11_try_end:; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":634 + * pydev_log.exception() + * finally: + * f_locals_back = None # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + goto __pyx_L5; + } + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1) < 0)) __Pyx_ErrFetch(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_10 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_ErrRestore(__pyx_t_3, __pyx_t_2, __pyx_t_1); + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_10; __pyx_clineno = __pyx_t_13; __pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":617 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _show_return_values(self, frame, arg): # <<<<<<<<<<<<<< + * # ELSE + * # def _show_return_values(self, frame, arg): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + 
__Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._show_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_f_locals_back); + __Pyx_XDECREF(__pyx_v_return_values_dict); + __Pyx_XDECREF(__pyx_v_name); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":637 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _remove_return_values(self, main_debugger, frame): # <<<<<<<<<<<<<< + * # ELSE + * # def _remove_return_values(self, main_debugger, frame): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__remove_return_values(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_main_debugger, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_f_locals_back = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + int __pyx_t_11; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + char const *__pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_remove_return_values", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":641 + * # def _remove_return_values(self, main_debugger, frame): + * # ENDIF + * try: # <<<<<<<<<<<<<< + * try: + * # Showing return values was turned off, we should remove them from locals dict. + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":642 + * # ENDIF + * try: + * try: # <<<<<<<<<<<<<< + * # Showing return values was turned off, we should remove them from locals dict. + * # The values can be in the current frame or in the back one + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":645 + * # Showing return values was turned off, we should remove them from locals dict. 
+ * # The values can be in the current frame or in the back one + * frame.f_locals.pop(RETURN_VALUES_DICT, None) # <<<<<<<<<<<<<< + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_locals); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_pop); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_5, Py_None}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_5, Py_None}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_9); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_5); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, Py_None); + __pyx_t_5 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_9, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 645, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":647 + * frame.f_locals.pop(RETURN_VALUES_DICT, None) + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) # <<<<<<<<<<<<<< + * if f_locals_back is not None: + * f_locals_back.pop(RETURN_VALUES_DICT, None) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 647, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_GetAttr3(__pyx_t_4, __pyx_n_s_f_locals, Py_None); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 647, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_f_locals_back = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":648 + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: + */ + __pyx_t_10 = (__pyx_v_f_locals_back != 
Py_None); + __pyx_t_11 = (__pyx_t_10 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":649 + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + * f_locals_back.pop(RETURN_VALUES_DICT, None) # <<<<<<<<<<<<<< + * except: + * pydev_log.exception() + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_locals_back, __pyx_n_s_pop); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 649, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 649, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_5 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_9, Py_None}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 649, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_9, Py_None}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 649, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 649, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_8, __pyx_t_9); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_8, Py_None); + __pyx_t_9 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 649, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":648 + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":642 + * # ENDIF + * try: + * try: # <<<<<<<<<<<<<< + * # Showing return values was turned off, we should remove them from locals dict. 
+ * # The values can be in the current frame or in the back one + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L11_try_end; + __pyx_L6_error:; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":650 + * if f_locals_back is not None: + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: # <<<<<<<<<<<<<< + * pydev_log.exception() + * finally: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._remove_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_4, &__pyx_t_7) < 0) __PYX_ERR(0, 650, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":651 + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: + * pydev_log.exception() # <<<<<<<<<<<<<< + * finally: + * f_locals_back = None + */ + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 651, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_exception); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 651, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + __pyx_t_9 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 651, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L7_exception_handled; + } + __pyx_L8_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":642 + * # ENDIF + * try: + * try: # <<<<<<<<<<<<<< + * # Showing return values was turned off, we should remove them from locals dict. 
+ * # The values can be in the current frame or in the back one + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L4_error; + __pyx_L7_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L11_try_end:; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":653 + * pydev_log.exception() + * finally: + * f_locals_back = None # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + goto __pyx_L5; + } + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1) < 0)) __Pyx_ErrFetch(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_8 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_ErrRestore(__pyx_t_3, __pyx_t_2, __pyx_t_1); + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_8; __pyx_clineno = __pyx_t_13; __pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":637 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _remove_return_values(self, main_debugger, frame): # <<<<<<<<<<<<<< + * # ELSE + * # def _remove_return_values(self, main_debugger, frame): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._remove_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_f_locals_back); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":656 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _get_unfiltered_back_frame(self, main_debugger, frame): # <<<<<<<<<<<<<< + * # ELSE + * # def _get_unfiltered_back_frame(self, main_debugger, frame): + */ + +static PyObject 
*__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__get_unfiltered_back_frame(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_main_debugger, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_f = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_get_unfiltered_back_frame", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":660 + * # def _get_unfiltered_back_frame(self, main_debugger, frame): + * # ENDIF + * f = frame.f_back # <<<<<<<<<<<<<< + * while f is not None: + * if not main_debugger.is_files_filter_enabled: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 660, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_f = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":661 + * # ENDIF + * f = frame.f_back + * while f is not None: # <<<<<<<<<<<<<< + * if not main_debugger.is_files_filter_enabled: + * return f + */ + while (1) { + __pyx_t_2 = (__pyx_v_f != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (!__pyx_t_3) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":662 + * f = frame.f_back + * while f is not None: + * if not main_debugger.is_files_filter_enabled: # <<<<<<<<<<<<<< + * return f + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_is_files_filter_enabled); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 662, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 662, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!__pyx_t_3) != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":663 + * while f is not None: + * if not main_debugger.is_files_filter_enabled: + * return f # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_f); + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":662 + * f = frame.f_back + * while f is not None: + * if not main_debugger.is_files_filter_enabled: # <<<<<<<<<<<<<< + * return f + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":666 + * + * else: + * if main_debugger.apply_files_filter(f, f.f_code.co_filename, False): # <<<<<<<<<<<<<< + * f = f.f_back + * + */ + /*else*/ { + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_code); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if 
(PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_f, __pyx_t_6, Py_False}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_f, __pyx_t_6, Py_False}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(3+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_f); + __Pyx_GIVEREF(__pyx_v_f); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_v_f); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_t_6); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_7, Py_False); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 666, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":667 + * else: + * if main_debugger.apply_files_filter(f, f.f_code.co_filename, False): + * f = f.f_back # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 667, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":666 + * + * else: + * if main_debugger.apply_files_filter(f, f.f_code.co_filename, False): # <<<<<<<<<<<<<< + * f = f.f_back + * + */ + goto __pyx_L6; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":670 + * + * else: + * return f # <<<<<<<<<<<<<< + * + * return f + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_f); + __pyx_r = __pyx_v_f; + goto __pyx_L0; + } + __pyx_L6:; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":672 + * return f + * + * return f # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_f); + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":656 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _get_unfiltered_back_frame(self, main_debugger, frame): # <<<<<<<<<<<<<< + * # ELSE + * # def _get_unfiltered_back_frame(self, main_debugger, frame): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._get_unfiltered_back_frame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + 
__Pyx_XDECREF(__pyx_v_f); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":675 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _is_same_frame(self, target_frame, current_frame): # <<<<<<<<<<<<<< + * cdef PyDBAdditionalThreadInfo info; + * # ELSE + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__is_same_frame(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_target_frame, PyObject *__pyx_v_current_frame) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_info = 0; + PyObject *__pyx_v_f = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_is_same_frame", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":680 + * # def _is_same_frame(self, target_frame, current_frame): + * # ENDIF + * if target_frame is current_frame: # <<<<<<<<<<<<<< + * return True + * + */ + __pyx_t_1 = (__pyx_v_target_frame == __pyx_v_current_frame); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":681 + * # ENDIF + * if target_frame is current_frame: + * return True # <<<<<<<<<<<<<< + * + * info = self._args[2] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":680 + * # def _is_same_frame(self, target_frame, current_frame): + * # ENDIF + * if target_frame is current_frame: # <<<<<<<<<<<<<< + * return True + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":683 + * return True + * + * info = self._args[2] # <<<<<<<<<<<<<< + * if info.pydev_use_scoped_step_frame: + * # If using scoped step we don't check the target, we just need to check + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 683, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 683, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 683, __pyx_L1_error) + __pyx_v_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":684 + * + * info = self._args[2] + * if info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * # If using scoped step we don't check the target, we just need to check + * # if the current matches the same heuristic where the target was defined. + */ + __pyx_t_2 = (__pyx_v_info->pydev_use_scoped_step_frame != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":687 + * # If using scoped step we don't check the target, we just need to check + * # if the current matches the same heuristic where the target was defined. 
+ * if target_frame is not None and current_frame is not None: # <<<<<<<<<<<<<< + * if target_frame.f_code.co_filename == current_frame.f_code.co_filename: + * # The co_name may be different (it may include the line number), but + */ + __pyx_t_1 = (__pyx_v_target_frame != Py_None); + __pyx_t_4 = (__pyx_t_1 != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L6_bool_binop_done; + } + __pyx_t_4 = (__pyx_v_current_frame != Py_None); + __pyx_t_1 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_1; + __pyx_L6_bool_binop_done:; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":688 + * # if the current matches the same heuristic where the target was defined. + * if target_frame is not None and current_frame is not None: + * if target_frame.f_code.co_filename == current_frame.f_code.co_filename: # <<<<<<<<<<<<<< + * # The co_name may be different (it may include the line number), but + * # the filename must still be the same. + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_target_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_current_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_RichCompare(__pyx_t_5, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":691 + * # The co_name may be different (it may include the line number), but + * # the filename must still be the same. + * f = current_frame.f_back # <<<<<<<<<<<<<< + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f = f.f_back + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_current_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 691, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v_f = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":692 + * # the filename must still be the same. 
+ * f = current_frame.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: # <<<<<<<<<<<<<< + * f = f.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + */ + __pyx_t_1 = (__pyx_v_f != Py_None); + __pyx_t_4 = (__pyx_t_1 != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_3, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_RichCompare(__pyx_t_6, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = __pyx_t_4; + __pyx_L10_bool_binop_done:; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":693 + * f = current_frame.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f = f.f_back # <<<<<<<<<<<<<< + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + * return True + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 693, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":694 + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f = f.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: # <<<<<<<<<<<<<< + * return True + * + */ + __pyx_t_4 = (__pyx_v_f != Py_None); + __pyx_t_1 = (__pyx_t_4 != 0); + if (__pyx_t_1) { + } else { + __pyx_t_2 = __pyx_t_1; + goto __pyx_L13_bool_binop_done; + } + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 694, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_name); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 694, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 694, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_GetItemInt(__pyx_t_3, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 694, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_RichCompare(__pyx_t_5, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 694, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 694, __pyx_L1_error) + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = __pyx_t_1; + __pyx_L13_bool_binop_done:; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":695 + * f = f.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + * return True # <<<<<<<<<<<<<< + * + * return False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":694 + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f = f.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: # <<<<<<<<<<<<<< + * return True + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":692 + * # the filename must still be the same. + * f = current_frame.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: # <<<<<<<<<<<<<< + * f = f.f_back + * if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":688 + * # if the current matches the same heuristic where the target was defined. + * if target_frame is not None and current_frame is not None: + * if target_frame.f_code.co_filename == current_frame.f_code.co_filename: # <<<<<<<<<<<<<< + * # The co_name may be different (it may include the line number), but + * # the filename must still be the same. + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":687 + * # If using scoped step we don't check the target, we just need to check + * # if the current matches the same heuristic where the target was defined. + * if target_frame is not None and current_frame is not None: # <<<<<<<<<<<<<< + * if target_frame.f_code.co_filename == current_frame.f_code.co_filename: + * # The co_name may be different (it may include the line number), but + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":684 + * + * info = self._args[2] + * if info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * # If using scoped step we don't check the target, we just need to check + * # if the current matches the same heuristic where the target was defined. 
+ */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":697 + * return True + * + * return False # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":675 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef _is_same_frame(self, target_frame, current_frame): # <<<<<<<<<<<<<< + * cdef PyDBAdditionalThreadInfo info; + * # ELSE + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame._is_same_frame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_info); + __Pyx_XDECREF(__pyx_v_f); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":700 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cpdef trace_dispatch(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef tuple abs_path_canonical_path_and_base; + * cdef bint is_exception_event; + */ + +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11trace_dispatch(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg, int __pyx_skip_dispatch) { + PyObject *__pyx_v_abs_path_canonical_path_and_base = 0; + int __pyx_v_is_exception_event; + int __pyx_v_has_exception_breakpoints; + int __pyx_v_can_skip; + int __pyx_v_stop; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_info = 0; + int __pyx_v_step_cmd; + int __pyx_v_line; + int __pyx_v_is_line; + int __pyx_v_is_call; + int __pyx_v_is_return; + int __pyx_v_should_stop; + PyObject *__pyx_v_breakpoints_for_file = 0; + PyObject *__pyx_v_stop_info = 0; + PyObject *__pyx_v_curr_func_name = 0; + int __pyx_v_exist_result; + PyObject *__pyx_v_frame_skips_cache = 0; + PyObject *__pyx_v_frame_cache_key = 0; + PyObject *__pyx_v_line_cache_key = 0; + int __pyx_v_breakpoints_in_line_cache; + int __pyx_v_breakpoints_in_frame_cache; + int __pyx_v_has_breakpoint_in_frame; + int __pyx_v_bp_line; + PyObject *__pyx_v_bp = 0; + int __pyx_v_pydev_smart_parent_offset; + int __pyx_v_pydev_smart_child_offset; + PyObject *__pyx_v_pydev_smart_step_into_variants = 0; + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_thread = NULL; + PyObject *__pyx_v_plugin_manager = NULL; + PyObject *__pyx_v_stop_frame = NULL; + PyObject *__pyx_v_function_breakpoint_on_call_event = NULL; + PyObject *__pyx_v_returns_cache_key = NULL; + PyObject *__pyx_v_return_lines = NULL; + PyObject *__pyx_v_x = NULL; + PyObject *__pyx_v_f = NULL; + PyObject *__pyx_v_func_lines = NULL; + PyObject *__pyx_v_offset_and_lineno = NULL; + PyObject *__pyx_v_flag = NULL; + PyObject *__pyx_v_breakpoint = NULL; + PyObject *__pyx_v_stop_reason = NULL; + PyObject *__pyx_v_bp_type = NULL; + PyObject *__pyx_v_new_frame = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_v_cmd = NULL; + PyObject *__pyx_v_eval_result = NULL; + long __pyx_v_should_skip; + PyObject *__pyx_v_plugin_stop = NULL; + PyObject *__pyx_v_force_check_project_scope = NULL; 
+ PyObject *__pyx_v_filename = NULL; + PyObject *__pyx_v_f2 = NULL; + PyObject *__pyx_v_back = NULL; + PyObject *__pyx_v_smart_step_into_variant = NULL; + PyObject *__pyx_v_children_variants = NULL; + PyObject *__pyx_v_f_code = NULL; + CYTHON_UNUSED PyObject *__pyx_v_stopped_on_plugin = NULL; + PyObject *__pyx_v_back_absolute_filename = NULL; + CYTHON_UNUSED PyObject *__pyx_v__ = NULL; + PyObject *__pyx_v_base = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + int __pyx_t_11; + Py_ssize_t __pyx_t_12; + PyObject *(*__pyx_t_13)(PyObject *); + int __pyx_t_14; + PyObject *(*__pyx_t_15)(PyObject *); + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18 = NULL; + int __pyx_t_19; + Py_ssize_t __pyx_t_20; + PyObject *__pyx_t_21 = NULL; + char const *__pyx_t_22; + PyObject *__pyx_t_23 = NULL; + PyObject *__pyx_t_24 = NULL; + PyObject *__pyx_t_25 = NULL; + PyObject *__pyx_t_26 = NULL; + PyObject *__pyx_t_27 = NULL; + PyObject *__pyx_t_28 = NULL; + int __pyx_t_29; + PyObject *__pyx_t_30 = NULL; + char const *__pyx_t_31; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("trace_dispatch", 0); + __Pyx_INCREF(__pyx_v_frame); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11trace_dispatch)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_frame, __pyx_v_event, 
__pyx_v_arg}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_6 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_arg); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "_pydevd_bundle/pydevd_cython.pyx":741 + * + * # DEBUG = '_debugger_case_generator.py' in frame.f_code.co_filename + * main_debugger, abs_path_canonical_path_and_base, info, thread, frame_skips_cache, frame_cache_key = self._args # <<<<<<<<<<<<<< + * # if DEBUG: print('frame trace_dispatch %s %s %s %s %s %s, stop: %s' % (frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename, event, constant_to_str(info.pydev_step_cmd), arg, info.pydev_step_stop)) + * try: + */ + __pyx_t_1 = __pyx_v_self->_args; + __Pyx_INCREF(__pyx_t_1); + if (likely(__pyx_t_1 != Py_None)) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 6)) { + if (size > 6) __Pyx_RaiseTooManyValuesError(6); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 741, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 2); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 3); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 4); + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 5); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + #else + { + Py_ssize_t i; + PyObject** temps[6] = {&__pyx_t_2,&__pyx_t_3,&__pyx_t_6,&__pyx_t_4,&__pyx_t_7,&__pyx_t_8}; + for (i=0; i < 6; i++) { + PyObject* item = PySequence_ITEM(sequence, i); if (unlikely(!item)) __PYX_ERR(0, 741, __pyx_L1_error) + __Pyx_GOTREF(item); + *(temps[i]) = item; + } + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(0, 741, __pyx_L1_error) + } + if 
(!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 741, __pyx_L1_error) + if (!(likely(((__pyx_t_6) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_6, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 741, __pyx_L1_error) + if (!(likely(PyDict_CheckExact(__pyx_t_7))||((__pyx_t_7) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_7)->tp_name), 0))) __PYX_ERR(0, 741, __pyx_L1_error) + __pyx_v_main_debugger = __pyx_t_2; + __pyx_t_2 = 0; + __pyx_v_abs_path_canonical_path_and_base = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + __pyx_v_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_6); + __pyx_t_6 = 0; + __pyx_v_thread = __pyx_t_4; + __pyx_t_4 = 0; + __pyx_v_frame_skips_cache = ((PyObject*)__pyx_t_7); + __pyx_t_7 = 0; + __pyx_v_frame_cache_key = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":743 + * main_debugger, abs_path_canonical_path_and_base, info, thread, frame_skips_cache, frame_cache_key = self._args + * # if DEBUG: print('frame trace_dispatch %s %s %s %s %s %s, stop: %s' % (frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename, event, constant_to_str(info.pydev_step_cmd), arg, info.pydev_step_stop)) + * try: # <<<<<<<<<<<<<< + * info.is_tracing += 1 + * + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":744 + * # if DEBUG: print('frame trace_dispatch %s %s %s %s %s %s, stop: %s' % (frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename, event, constant_to_str(info.pydev_step_cmd), arg, info.pydev_step_stop)) + * try: + * info.is_tracing += 1 # <<<<<<<<<<<<<< + * + * # TODO: This shouldn't be needed. The fact that frame.f_lineno + */ + __pyx_v_info->is_tracing = (__pyx_v_info->is_tracing + 1); + + /* "_pydevd_bundle/pydevd_cython.pyx":749 + * # is None seems like a bug in Python 3.11. 
+ * # Reported in: https://github.com/python/cpython/issues/94485 + * line = frame.f_lineno or 0 # Workaround or case where frame.f_lineno is None # <<<<<<<<<<<<<< + * line_cache_key = (frame_cache_key, line) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 749, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 749, __pyx_L4_error) + if (!__pyx_t_9) { + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __pyx_t_10 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_10 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 749, __pyx_L4_error) + __pyx_t_5 = __pyx_t_10; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L6_bool_binop_done; + } + __pyx_t_5 = 0; + __pyx_L6_bool_binop_done:; + __pyx_v_line = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":750 + * # Reported in: https://github.com/python/cpython/issues/94485 + * line = frame.f_lineno or 0 # Workaround or case where frame.f_lineno is None + * line_cache_key = (frame_cache_key, line) # <<<<<<<<<<<<<< + * + * if main_debugger.pydb_disposed: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 750, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 750, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_v_frame_cache_key); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_line_cache_key = ((PyObject*)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":752 + * line_cache_key = (frame_cache_key, line) + * + * if main_debugger.pydb_disposed: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_pydb_disposed); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 752, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 752, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":753 + * + * if main_debugger.pydb_disposed: + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * plugin_manager = main_debugger.plugin + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 753, __pyx_L4_error) + if ((__pyx_t_9 != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_8 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 753, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __pyx_t_1; + __pyx_t_1 = 0; + } + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":752 + * line_cache_key = (frame_cache_key, line) + * + * if main_debugger.pydb_disposed: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":755 + * return None if event == 'call' else NO_FTRACE + * + * plugin_manager = main_debugger.plugin # <<<<<<<<<<<<<< + * has_exception_breakpoints = ( + * main_debugger.break_on_caught_exceptions + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if 
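+ /* Assumption (annotation for readability, not grounded in this file): NO_FTRACE,
+  * resolved just above as a module-level global, appears to be pydevd's do-nothing
+  * trace function. Returning None from a local trace function only disables tracing
+  * when handling a 'call' event (the new frame's f_trace is simply never installed);
+  * for 'line'/'return'/'exception' events CPython keeps the existing f_trace when
+  * None is returned, so the no-op NO_FTRACE is assumed to be returned instead to
+  * replace it and effectively stop tracing that frame. */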
(unlikely(!__pyx_t_8)) __PYX_ERR(0, 755, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_v_plugin_manager = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":757 + * plugin_manager = main_debugger.plugin + * has_exception_breakpoints = ( + * main_debugger.break_on_caught_exceptions # <<<<<<<<<<<<<< + * or main_debugger.break_on_user_uncaught_exceptions + * or main_debugger.has_plugin_exception_breaks) + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_caught_exceptions); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 757, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 757, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (!__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L9_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":758 + * has_exception_breakpoints = ( + * main_debugger.break_on_caught_exceptions + * or main_debugger.break_on_user_uncaught_exceptions # <<<<<<<<<<<<<< + * or main_debugger.has_plugin_exception_breaks) + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_user_uncaught_exception); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 758, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 758, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (!__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L9_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":759 + * main_debugger.break_on_caught_exceptions + * or main_debugger.break_on_user_uncaught_exceptions + * or main_debugger.has_plugin_exception_breaks) # <<<<<<<<<<<<<< + * + * stop_frame = info.pydev_step_stop + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 759, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 759, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __pyx_t_11; + __pyx_L9_bool_binop_done:; + __pyx_v_has_exception_breakpoints = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":761 + * or main_debugger.has_plugin_exception_breaks) + * + * stop_frame = info.pydev_step_stop # <<<<<<<<<<<<<< + * step_cmd = info.pydev_step_cmd + * function_breakpoint_on_call_event = None + */ + __pyx_t_8 = __pyx_v_info->pydev_step_stop; + __Pyx_INCREF(__pyx_t_8); + __pyx_v_stop_frame = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":762 + * + * stop_frame = info.pydev_step_stop + * step_cmd = info.pydev_step_cmd # <<<<<<<<<<<<<< + * function_breakpoint_on_call_event = None + * + */ + __pyx_t_5 = __pyx_v_info->pydev_step_cmd; + __pyx_v_step_cmd = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":763 + * stop_frame = info.pydev_step_stop + * step_cmd = info.pydev_step_cmd + * function_breakpoint_on_call_event = None # <<<<<<<<<<<<<< + * + * if frame.f_code.co_flags & 0xa0: # 0xa0 == CO_GENERATOR = 0x20 | CO_COROUTINE = 0x80 + */ + __Pyx_INCREF(Py_None); + __pyx_v_function_breakpoint_on_call_event = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":765 + * function_breakpoint_on_call_event = None + * + * if frame.f_code.co_flags & 0xa0: # 0xa0 == CO_GENERATOR = 0x20 | CO_COROUTINE = 0x80 # <<<<<<<<<<<<<< + * # Dealing with coroutines and generators: + * # When in a coroutine we 
change the perceived event to the debugger because + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 765, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_flags); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 765, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = __Pyx_PyInt_AndObjC(__pyx_t_1, __pyx_int_160, 0xa0, 0, 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 765, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 765, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":769 + * # When in a coroutine we change the perceived event to the debugger because + * # a call, StopIteration exception and return are usually just pausing/unpausing it. + * if event == 'line': # <<<<<<<<<<<<<< + * is_line = True + * is_call = False + */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_line, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 769, __pyx_L4_error) + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":770 + * # a call, StopIteration exception and return are usually just pausing/unpausing it. + * if event == 'line': + * is_line = True # <<<<<<<<<<<<<< + * is_call = False + * is_return = False + */ + __pyx_v_is_line = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":771 + * if event == 'line': + * is_line = True + * is_call = False # <<<<<<<<<<<<<< + * is_return = False + * is_exception_event = False + */ + __pyx_v_is_call = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":772 + * is_line = True + * is_call = False + * is_return = False # <<<<<<<<<<<<<< + * is_exception_event = False + * + */ + __pyx_v_is_return = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":773 + * is_call = False + * is_return = False + * is_exception_event = False # <<<<<<<<<<<<<< + * + * elif event == 'return': + */ + __pyx_v_is_exception_event = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":769 + * # When in a coroutine we change the perceived event to the debugger because + * # a call, StopIteration exception and return are usually just pausing/unpausing it. 
+ * if event == 'line': # <<<<<<<<<<<<<< + * is_line = True + * is_call = False + */ + goto __pyx_L13; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":775 + * is_exception_event = False + * + * elif event == 'return': # <<<<<<<<<<<<<< + * is_line = False + * is_call = False + */ + __pyx_t_11 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_return, Py_EQ)); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 775, __pyx_L4_error) + __pyx_t_9 = (__pyx_t_11 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":776 + * + * elif event == 'return': + * is_line = False # <<<<<<<<<<<<<< + * is_call = False + * is_return = True + */ + __pyx_v_is_line = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":777 + * elif event == 'return': + * is_line = False + * is_call = False # <<<<<<<<<<<<<< + * is_return = True + * is_exception_event = False + */ + __pyx_v_is_call = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":778 + * is_line = False + * is_call = False + * is_return = True # <<<<<<<<<<<<<< + * is_exception_event = False + * + */ + __pyx_v_is_return = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":779 + * is_call = False + * is_return = True + * is_exception_event = False # <<<<<<<<<<<<<< + * + * returns_cache_key = (frame_cache_key, 'returns') + */ + __pyx_v_is_exception_event = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":781 + * is_exception_event = False + * + * returns_cache_key = (frame_cache_key, 'returns') # <<<<<<<<<<<<<< + * return_lines = frame_skips_cache.get(returns_cache_key) + * if return_lines is None: + */ + __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 781, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_v_frame_cache_key); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_frame_cache_key); + __Pyx_INCREF(__pyx_n_s_returns); + __Pyx_GIVEREF(__pyx_n_s_returns); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_n_s_returns); + __pyx_v_returns_cache_key = ((PyObject*)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":782 + * + * returns_cache_key = (frame_cache_key, 'returns') + * return_lines = frame_skips_cache.get(returns_cache_key) # <<<<<<<<<<<<<< + * if return_lines is None: + * # Note: we're collecting the return lines by inspecting the bytecode as + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 782, __pyx_L4_error) + } + __pyx_t_8 = __Pyx_PyDict_GetItemDefault(__pyx_v_frame_skips_cache, __pyx_v_returns_cache_key, Py_None); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 782, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_v_return_lines = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":783 + * returns_cache_key = (frame_cache_key, 'returns') + * return_lines = frame_skips_cache.get(returns_cache_key) + * if return_lines is None: # <<<<<<<<<<<<<< + * # Note: we're collecting the return lines by inspecting the bytecode as + * # there are multiple returns and multiple stop iterations when awaiting and + */ + __pyx_t_9 = (__pyx_v_return_lines == Py_None); + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":788 + * # it doesn't give any clear indication when a coroutine or generator is + * # finishing or just pausing. 
+ * return_lines = set() # <<<<<<<<<<<<<< + * for x in main_debugger.collect_return_info(frame.f_code): + * # Note: cython does not support closures in cpdefs (so we can't use + */ + __pyx_t_8 = PySet_New(0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 788, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF_SET(__pyx_v_return_lines, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":789 + * # finishing or just pausing. + * return_lines = set() + * for x in main_debugger.collect_return_info(frame.f_code): # <<<<<<<<<<<<<< + * # Note: cython does not support closures in cpdefs (so we can't use + * # a list comprehension). + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_collect_return_info); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 789, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 789, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_8 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_4, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 789, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (likely(PyList_CheckExact(__pyx_t_8)) || PyTuple_CheckExact(__pyx_t_8)) { + __pyx_t_1 = __pyx_t_8; __Pyx_INCREF(__pyx_t_1); __pyx_t_12 = 0; + __pyx_t_13 = NULL; + } else { + __pyx_t_12 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 789, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_13 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 789, __pyx_L4_error) + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + for (;;) { + if (likely(!__pyx_t_13)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_12 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_8 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_12); __Pyx_INCREF(__pyx_t_8); __pyx_t_12++; if (unlikely(0 < 0)) __PYX_ERR(0, 789, __pyx_L4_error) + #else + __pyx_t_8 = PySequence_ITEM(__pyx_t_1, __pyx_t_12); __pyx_t_12++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 789, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + #endif + } else { + if (__pyx_t_12 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_12); __Pyx_INCREF(__pyx_t_8); __pyx_t_12++; if (unlikely(0 < 0)) __PYX_ERR(0, 789, __pyx_L4_error) + #else + __pyx_t_8 = PySequence_ITEM(__pyx_t_1, __pyx_t_12); __pyx_t_12++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 789, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + #endif + } + } else { + __pyx_t_8 = __pyx_t_13(__pyx_t_1); + if (unlikely(!__pyx_t_8)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 789, __pyx_L4_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_8); + } + __Pyx_XDECREF_SET(__pyx_v_x, __pyx_t_8); + __pyx_t_8 = 0; + + /* 
"_pydevd_bundle/pydevd_cython.pyx":792 + * # Note: cython does not support closures in cpdefs (so we can't use + * # a list comprehension). + * return_lines.add(x.return_line) # <<<<<<<<<<<<<< + * + * frame_skips_cache[returns_cache_key] = return_lines + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_return_lines, __pyx_n_s_add); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 792, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_x, __pyx_n_s_return_line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 792, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_8 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_6, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 792, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":789 + * # finishing or just pausing. + * return_lines = set() + * for x in main_debugger.collect_return_info(frame.f_code): # <<<<<<<<<<<<<< + * # Note: cython does not support closures in cpdefs (so we can't use + * # a list comprehension). + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":794 + * return_lines.add(x.return_line) + * + * frame_skips_cache[returns_cache_key] = return_lines # <<<<<<<<<<<<<< + * + * if line not in return_lines: + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 794, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_returns_cache_key, __pyx_v_return_lines) < 0)) __PYX_ERR(0, 794, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":783 + * returns_cache_key = (frame_cache_key, 'returns') + * return_lines = frame_skips_cache.get(returns_cache_key) + * if return_lines is None: # <<<<<<<<<<<<<< + * # Note: we're collecting the return lines by inspecting the bytecode as + * # there are multiple returns and multiple stop iterations when awaiting and + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":796 + * frame_skips_cache[returns_cache_key] = return_lines + * + * if line not in return_lines: # <<<<<<<<<<<<<< + * # Not really a return (coroutine/generator paused). + * return self.trace_dispatch + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 796, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_v_return_lines, Py_NE)); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 796, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = (__pyx_t_11 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":798 + * if line not in return_lines: + * # Not really a return (coroutine/generator paused). 
+ * return self.trace_dispatch # <<<<<<<<<<<<<< + * else: + * if self.exc_info: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 798, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":796 + * frame_skips_cache[returns_cache_key] = return_lines + * + * if line not in return_lines: # <<<<<<<<<<<<<< + * # Not really a return (coroutine/generator paused). + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":800 + * return self.trace_dispatch + * else: + * if self.exc_info: # <<<<<<<<<<<<<< + * self.handle_user_exception(frame) + * return self.trace_dispatch + */ + /*else*/ { + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_self->exc_info); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 800, __pyx_L4_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":801 + * else: + * if self.exc_info: + * self.handle_user_exception(frame) # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_handle_user_exception); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 801, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_7, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 801, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":802 + * if self.exc_info: + * self.handle_user_exception(frame) + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * # Tricky handling: usually when we're on a frame which is about to exit + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 802, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":800 + * return self.trace_dispatch + * else: + * if self.exc_info: # <<<<<<<<<<<<<< + * self.handle_user_exception(frame) + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":820 + * # as the return shouldn't mean that we've actually completed executing a + * # frame in this case). + * if stop_frame is frame and not info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * if step_cmd in (108, 159, 107, 144): + * f = self._get_unfiltered_back_frame(main_debugger, frame) + */ + __pyx_t_11 = (__pyx_v_stop_frame == __pyx_v_frame); + __pyx_t_14 = (__pyx_t_11 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L20_bool_binop_done; + } + __pyx_t_14 = ((!(__pyx_v_info->pydev_use_scoped_step_frame != 0)) != 0); + __pyx_t_9 = __pyx_t_14; + __pyx_L20_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":821 + * # frame in this case). 
+ * if stop_frame is frame and not info.pydev_use_scoped_step_frame: + * if step_cmd in (108, 159, 107, 144): # <<<<<<<<<<<<<< + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + case 0x9F: + case 0x6B: + case 0x90: + + /* "_pydevd_bundle/pydevd_cython.pyx":822 + * if stop_frame is frame and not info.pydev_use_scoped_step_frame: + * if step_cmd in (108, 159, 107, 144): + * f = self._get_unfiltered_back_frame(main_debugger, frame) # <<<<<<<<<<<<<< + * if f is not None: + * info.pydev_step_cmd = 206 + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_get_unfiltered_back_frame(__pyx_v_self, __pyx_v_main_debugger, __pyx_v_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 822, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_f = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":823 + * if step_cmd in (108, 159, 107, 144): + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 206 + * info.pydev_step_stop = f + */ + __pyx_t_9 = (__pyx_v_f != Py_None); + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":824 + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: + * info.pydev_step_cmd = 206 # <<<<<<<<<<<<<< + * info.pydev_step_stop = f + * else: + */ + __pyx_v_info->pydev_step_cmd = 0xCE; + + /* "_pydevd_bundle/pydevd_cython.pyx":825 + * if f is not None: + * info.pydev_step_cmd = 206 + * info.pydev_step_stop = f # <<<<<<<<<<<<<< + * else: + * if step_cmd == 108: + */ + __Pyx_INCREF(__pyx_v_f); + __Pyx_GIVEREF(__pyx_v_f); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = __pyx_v_f; + + /* "_pydevd_bundle/pydevd_cython.pyx":823 + * if step_cmd in (108, 159, 107, 144): + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 206 + * info.pydev_step_stop = f + */ + goto __pyx_L22; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":827 + * info.pydev_step_stop = f + * else: + * if step_cmd == 108: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 + * info.pydev_step_stop = None + */ + /*else*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":831 + * info.pydev_step_stop = None + * + * elif step_cmd == 159: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 144 + * info.pydev_step_stop = None + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + + /* "_pydevd_bundle/pydevd_cython.pyx":828 + * else: + * if step_cmd == 108: + * info.pydev_step_cmd = 107 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None + * + */ + __pyx_v_info->pydev_step_cmd = 0x6B; + + /* "_pydevd_bundle/pydevd_cython.pyx":829 + * if step_cmd == 108: + * info.pydev_step_cmd = 107 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * + * elif step_cmd == 159: + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":827 + * info.pydev_step_stop = f + * else: + * if step_cmd == 108: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 + * info.pydev_step_stop = None + */ + break; + case 0x9F: + + /* "_pydevd_bundle/pydevd_cython.pyx":832 + * + * elif step_cmd == 159: + * info.pydev_step_cmd = 144 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None 
+ * + */ + __pyx_v_info->pydev_step_cmd = 0x90; + + /* "_pydevd_bundle/pydevd_cython.pyx":833 + * elif step_cmd == 159: + * info.pydev_step_cmd = 144 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * + * elif step_cmd == 206: + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":831 + * info.pydev_step_stop = None + * + * elif step_cmd == 159: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 144 + * info.pydev_step_stop = None + */ + break; + default: break; + } + } + __pyx_L22:; + + /* "_pydevd_bundle/pydevd_cython.pyx":821 + * # frame in this case). + * if stop_frame is frame and not info.pydev_use_scoped_step_frame: + * if step_cmd in (108, 159, 107, 144): # <<<<<<<<<<<<<< + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: + */ + break; + case 0xCE: + + /* "_pydevd_bundle/pydevd_cython.pyx":837 + * elif step_cmd == 206: + * # We're exiting this one, so, mark the new coroutine context. + * f = self._get_unfiltered_back_frame(main_debugger, frame) # <<<<<<<<<<<<<< + * if f is not None: + * info.pydev_step_stop = f + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_get_unfiltered_back_frame(__pyx_v_self, __pyx_v_main_debugger, __pyx_v_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 837, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_f = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":838 + * # We're exiting this one, so, mark the new coroutine context. + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: # <<<<<<<<<<<<<< + * info.pydev_step_stop = f + * else: + */ + __pyx_t_14 = (__pyx_v_f != Py_None); + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":839 + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: + * info.pydev_step_stop = f # <<<<<<<<<<<<<< + * else: + * info.pydev_step_cmd = 107 + */ + __Pyx_INCREF(__pyx_v_f); + __Pyx_GIVEREF(__pyx_v_f); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = __pyx_v_f; + + /* "_pydevd_bundle/pydevd_cython.pyx":838 + * # We're exiting this one, so, mark the new coroutine context. + * f = self._get_unfiltered_back_frame(main_debugger, frame) + * if f is not None: # <<<<<<<<<<<<<< + * info.pydev_step_stop = f + * else: + */ + goto __pyx_L23; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":841 + * info.pydev_step_stop = f + * else: + * info.pydev_step_cmd = 107 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None + * + */ + /*else*/ { + __pyx_v_info->pydev_step_cmd = 0x6B; + + /* "_pydevd_bundle/pydevd_cython.pyx":842 + * else: + * info.pydev_step_cmd = 107 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * + * elif event == 'exception': + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + } + __pyx_L23:; + + /* "_pydevd_bundle/pydevd_cython.pyx":835 + * info.pydev_step_stop = None + * + * elif step_cmd == 206: # <<<<<<<<<<<<<< + * # We're exiting this one, so, mark the new coroutine context. 
+ * f = self._get_unfiltered_back_frame(main_debugger, frame) + */ + break; + default: break; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":820 + * # as the return shouldn't mean that we've actually completed executing a + * # frame in this case). + * if stop_frame is frame and not info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * if step_cmd in (108, 159, 107, 144): + * f = self._get_unfiltered_back_frame(main_debugger, frame) + */ + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":775 + * is_exception_event = False + * + * elif event == 'return': # <<<<<<<<<<<<<< + * is_line = False + * is_call = False + */ + goto __pyx_L13; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":844 + * info.pydev_step_stop = None + * + * elif event == 'exception': # <<<<<<<<<<<<<< + * breakpoints_for_file = None + * if has_exception_breakpoints: + */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 844, __pyx_L4_error) + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":845 + * + * elif event == 'exception': + * breakpoints_for_file = None # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + */ + __Pyx_INCREF(Py_None); + __pyx_v_breakpoints_for_file = ((PyObject*)Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":846 + * elif event == 'exception': + * breakpoints_for_file = None + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + */ + __pyx_t_14 = (__pyx_v_has_exception_breakpoints != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":847 + * breakpoints_for_file = None + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) # <<<<<<<<<<<<<< + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_should_stop_on_exception(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 847, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 847, __pyx_L4_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_8 = PyList_GET_ITEM(sequence, 0); + __pyx_t_7 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + #else + __pyx_t_8 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 847, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 847, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 847, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_15 = 
Py_TYPE(__pyx_t_4)->tp_iternext; + index = 0; __pyx_t_8 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_8)) goto __pyx_L25_unpacking_failed; + __Pyx_GOTREF(__pyx_t_8); + index = 1; __pyx_t_7 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_7)) goto __pyx_L25_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_4), 2) < 0) __PYX_ERR(0, 847, __pyx_L4_error) + __pyx_t_15 = NULL; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L26_unpacking_done; + __pyx_L25_unpacking_failed:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_15 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 847, __pyx_L4_error) + __pyx_L26_unpacking_done:; + } + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely((__pyx_t_14 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 847, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_should_stop = __pyx_t_14; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":848 + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: # <<<<<<<<<<<<<< + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + */ + __pyx_t_14 = (__pyx_v_should_stop != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":849 + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_EXCEPTION_TYPE_HANDLED); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 849, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 849, __pyx_L4_error) + __pyx_t_7 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_handle_exception(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 849, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 849, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":850 + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * return self.trace_dispatch + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 850, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":849 + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":848 + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: # <<<<<<<<<<<<<< + * if self._handle_exception(frame, event, arg, 
EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":846 + * elif event == 'exception': + * breakpoints_for_file = None + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":852 + * return self.trace_dispatch + * + * return self.trace_dispatch # <<<<<<<<<<<<<< + * else: + * # event == 'call' or event == 'c_XXX' + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 852, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":844 + * info.pydev_step_stop = None + * + * elif event == 'exception': # <<<<<<<<<<<<<< + * breakpoints_for_file = None + * if has_exception_breakpoints: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":855 + * else: + * # event == 'call' or event == 'c_XXX' + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * else: # Not coroutine nor generator + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 855, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L3_return; + } + __pyx_L13:; + + /* "_pydevd_bundle/pydevd_cython.pyx":765 + * function_breakpoint_on_call_event = None + * + * if frame.f_code.co_flags & 0xa0: # 0xa0 == CO_GENERATOR = 0x20 | CO_COROUTINE = 0x80 # <<<<<<<<<<<<<< + * # Dealing with coroutines and generators: + * # When in a coroutine we change the perceived event to the debugger because + */ + goto __pyx_L12; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":858 + * + * else: # Not coroutine nor generator + * if event == 'line': # <<<<<<<<<<<<<< + * is_line = True + * is_call = False + */ + /*else*/ { + __pyx_t_14 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_line, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 858, __pyx_L4_error) + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":859 + * else: # Not coroutine nor generator + * if event == 'line': + * is_line = True # <<<<<<<<<<<<<< + * is_call = False + * is_return = False + */ + __pyx_v_is_line = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":860 + * if event == 'line': + * is_line = True + * is_call = False # <<<<<<<<<<<<<< + * is_return = False + * is_exception_event = False + */ + __pyx_v_is_call = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":861 + * is_line = True + * is_call = False + * is_return = False # <<<<<<<<<<<<<< + * is_exception_event = False + * + */ + __pyx_v_is_return = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":862 + * is_call = False + * is_return = False + * is_exception_event = False # <<<<<<<<<<<<<< + * + * elif event == 'return': + */ + __pyx_v_is_exception_event = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":858 + * + * else: # Not coroutine nor generator + * if event == 'line': # <<<<<<<<<<<<<< + * is_line = True + * is_call = False + */ + goto __pyx_L29; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":864 + * is_exception_event = False + * + * elif event == 'return': # <<<<<<<<<<<<<< + * is_line = False + * is_return = True + */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_return, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 864, 
__pyx_L4_error) + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":865 + * + * elif event == 'return': + * is_line = False # <<<<<<<<<<<<<< + * is_return = True + * is_call = False + */ + __pyx_v_is_line = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":866 + * elif event == 'return': + * is_line = False + * is_return = True # <<<<<<<<<<<<<< + * is_call = False + * is_exception_event = False + */ + __pyx_v_is_return = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":867 + * is_line = False + * is_return = True + * is_call = False # <<<<<<<<<<<<<< + * is_exception_event = False + * + */ + __pyx_v_is_call = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":868 + * is_return = True + * is_call = False + * is_exception_event = False # <<<<<<<<<<<<<< + * + * # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break + */ + __pyx_v_is_exception_event = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":877 + * # @DontTrace comment. + * if ( + * stop_frame is frame and # <<<<<<<<<<<<<< + * not info.pydev_use_scoped_step_frame and is_return and + * step_cmd in (108, 109, 159, 160, 128) + */ + __pyx_t_9 = (__pyx_v_stop_frame == __pyx_v_frame); + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":878 + * if ( + * stop_frame is frame and + * not info.pydev_use_scoped_step_frame and is_return and # <<<<<<<<<<<<<< + * step_cmd in (108, 109, 159, 160, 128) + * ): + */ + __pyx_t_11 = ((!(__pyx_v_info->pydev_use_scoped_step_frame != 0)) != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L31_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_is_return != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":879 + * stop_frame is frame and + * not info.pydev_use_scoped_step_frame and is_return and + * step_cmd in (108, 109, 159, 160, 128) # <<<<<<<<<<<<<< + * ): + * + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + case 0x6D: + case 0x9F: + case 0xA0: + case 0x80: + __pyx_t_11 = 1; + break; + default: + __pyx_t_11 = 0; + break; + } + __pyx_t_9 = (__pyx_t_11 != 0); + __pyx_t_14 = __pyx_t_9; + __pyx_L31_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":876 + * # Note: this is especially troublesome when we're skipping code with the + * # @DontTrace comment. 
+ * if ( # <<<<<<<<<<<<<< + * stop_frame is frame and + * not info.pydev_use_scoped_step_frame and is_return and + */ + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":882 + * ): + * + * if step_cmd in (108, 109, 128): # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 + * else: + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + case 0x6D: + case 0x80: + + /* "_pydevd_bundle/pydevd_cython.pyx":883 + * + * if step_cmd in (108, 109, 128): + * info.pydev_step_cmd = 107 # <<<<<<<<<<<<<< + * else: + * info.pydev_step_cmd = 144 + */ + __pyx_v_info->pydev_step_cmd = 0x6B; + + /* "_pydevd_bundle/pydevd_cython.pyx":882 + * ): + * + * if step_cmd in (108, 109, 128): # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 + * else: + */ + break; + default: + + /* "_pydevd_bundle/pydevd_cython.pyx":885 + * info.pydev_step_cmd = 107 + * else: + * info.pydev_step_cmd = 144 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None + * + */ + __pyx_v_info->pydev_step_cmd = 0x90; + break; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":886 + * else: + * info.pydev_step_cmd = 144 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * + * if self.exc_info: + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":876 + * # Note: this is especially troublesome when we're skipping code with the + * # @DontTrace comment. + * if ( # <<<<<<<<<<<<<< + * stop_frame is frame and + * not info.pydev_use_scoped_step_frame and is_return and + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":888 + * info.pydev_step_stop = None + * + * if self.exc_info: # <<<<<<<<<<<<<< + * if self.handle_user_exception(frame): + * return self.trace_dispatch + */ + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_self->exc_info); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 888, __pyx_L4_error) + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":889 + * + * if self.exc_info: + * if self.handle_user_exception(frame): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_handle_user_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 889, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_7 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_8, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 889, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 889, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":890 + * if self.exc_info: + * if self.handle_user_exception(frame): + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * elif event == 'call': + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 890, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":889 + * + * if self.exc_info: + * if self.handle_user_exception(frame): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":888 + * info.pydev_step_stop = None + * + * if self.exc_info: # <<<<<<<<<<<<<< + * if self.handle_user_exception(frame): + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":864 + * is_exception_event = False + * + * elif event == 'return': # <<<<<<<<<<<<<< + * is_line = False + * is_return = True + */ + goto __pyx_L29; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":892 + * return self.trace_dispatch + * + * elif event == 'call': # <<<<<<<<<<<<<< + * is_line = False + * is_call = True + */ + __pyx_t_14 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 892, __pyx_L4_error) + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":893 + * + * elif event == 'call': + * is_line = False # <<<<<<<<<<<<<< + * is_call = True + * is_return = False + */ + __pyx_v_is_line = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":894 + * elif event == 'call': + * is_line = False + * is_call = True # <<<<<<<<<<<<<< + * is_return = False + * is_exception_event = False + */ + __pyx_v_is_call = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":895 + * is_line = False + * is_call = True + * is_return = False # <<<<<<<<<<<<<< + * is_exception_event = False + * if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. + */ + __pyx_v_is_return = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":896 + * is_call = True + * is_return = False + * is_exception_event = False # <<<<<<<<<<<<<< + * if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. + * function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + */ + __pyx_v_is_exception_event = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":897 + * is_return = False + * is_exception_event = False + * if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. 
# <<<<<<<<<<<<<< + * function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 897, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_co_firstlineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 897, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 897, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyObject_RichCompare(__pyx_t_1, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 897, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 897, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":898 + * is_exception_event = False + * if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. + * function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) # <<<<<<<<<<<<<< + * + * elif event == 'exception': + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_function_breakpoint_name_to_brea); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 898, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 898, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 898, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_co_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 898, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_8 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_7, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 898, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_function_breakpoint_on_call_event, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":897 + * is_return = False + * is_exception_event = False + * if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. 
# <<<<<<<<<<<<<< + * function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":892 + * return self.trace_dispatch + * + * elif event == 'call': # <<<<<<<<<<<<<< + * is_line = False + * is_call = True + */ + goto __pyx_L29; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":900 + * function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + * + * elif event == 'exception': # <<<<<<<<<<<<<< + * is_exception_event = True + * breakpoints_for_file = None + */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 900, __pyx_L4_error) + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":901 + * + * elif event == 'exception': + * is_exception_event = True # <<<<<<<<<<<<<< + * breakpoints_for_file = None + * if has_exception_breakpoints: + */ + __pyx_v_is_exception_event = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":902 + * elif event == 'exception': + * is_exception_event = True + * breakpoints_for_file = None # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + */ + __Pyx_INCREF(Py_None); + __pyx_v_breakpoints_for_file = ((PyObject*)Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":903 + * is_exception_event = True + * breakpoints_for_file = None + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + */ + __pyx_t_14 = (__pyx_v_has_exception_breakpoints != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":904 + * breakpoints_for_file = None + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) # <<<<<<<<<<<<<< + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + */ + __pyx_t_8 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_should_stop_on_exception(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 904, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + if ((likely(PyTuple_CheckExact(__pyx_t_8))) || (PyList_CheckExact(__pyx_t_8))) { + PyObject* sequence = __pyx_t_8; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 904, __pyx_L4_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_4 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 904, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 904, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_7 = PyObject_GetIter(__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 904, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 
= 0; + __pyx_t_15 = Py_TYPE(__pyx_t_7)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_15(__pyx_t_7); if (unlikely(!__pyx_t_1)) goto __pyx_L39_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_4 = __pyx_t_15(__pyx_t_7); if (unlikely(!__pyx_t_4)) goto __pyx_L39_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_7), 2) < 0) __PYX_ERR(0, 904, __pyx_L4_error) + __pyx_t_15 = NULL; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L40_unpacking_done; + __pyx_L39_unpacking_failed:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_15 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 904, __pyx_L4_error) + __pyx_L40_unpacking_done:; + } + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_14 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 904, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_should_stop = __pyx_t_14; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":905 + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: # <<<<<<<<<<<<<< + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + */ + __pyx_t_14 = (__pyx_v_should_stop != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":906 + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * is_line = False + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_EXCEPTION_TYPE_HANDLED); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 906, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + if (!(likely(PyString_CheckExact(__pyx_t_8))||((__pyx_t_8) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_8)->tp_name), 0))) __PYX_ERR(0, 906, __pyx_L4_error) + __pyx_t_4 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_handle_exception(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg, ((PyObject*)__pyx_t_8)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 906, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 906, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":907 + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch # <<<<<<<<<<<<<< + * is_line = False + * is_return = False + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 907, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":906 + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): # <<<<<<<<<<<<<< + * return self.trace_dispatch + * is_line = False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":905 + * if has_exception_breakpoints: + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: # 
<<<<<<<<<<<<<< + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":903 + * is_exception_event = True + * breakpoints_for_file = None + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * should_stop, frame = self._should_stop_on_exception(frame, event, arg) + * if should_stop: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":908 + * if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + * return self.trace_dispatch + * is_line = False # <<<<<<<<<<<<<< + * is_return = False + * is_call = False + */ + __pyx_v_is_line = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":909 + * return self.trace_dispatch + * is_line = False + * is_return = False # <<<<<<<<<<<<<< + * is_call = False + * + */ + __pyx_v_is_return = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":910 + * is_line = False + * is_return = False + * is_call = False # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_v_is_call = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":900 + * function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + * + * elif event == 'exception': # <<<<<<<<<<<<<< + * is_exception_event = True + * breakpoints_for_file = None + */ + goto __pyx_L29; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":914 + * else: + * # Unexpected: just keep the same trace func (i.e.: event == 'c_XXX'). + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * if not is_exception_event: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 914, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L3_return; + } + __pyx_L29:; + } + __pyx_L12:; + + /* "_pydevd_bundle/pydevd_cython.pyx":916 + * return self.trace_dispatch + * + * if not is_exception_event: # <<<<<<<<<<<<<< + * breakpoints_for_file = main_debugger.breakpoints.get(abs_path_canonical_path_and_base[1]) + * + */ + __pyx_t_14 = ((!(__pyx_v_is_exception_event != 0)) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":917 + * + * if not is_exception_event: + * breakpoints_for_file = main_debugger.breakpoints.get(abs_path_canonical_path_and_base[1]) # <<<<<<<<<<<<<< + * + * can_skip = False + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_breakpoints); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 917, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 917, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(__pyx_v_abs_path_canonical_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 917, __pyx_L4_error) + } + __pyx_t_8 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_path_canonical_path_and_base, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 917, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_7, __pyx_t_8) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 917, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(PyDict_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 917, __pyx_L4_error) + __Pyx_XDECREF_SET(__pyx_v_breakpoints_for_file, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":919 + * breakpoints_for_file = main_debugger.breakpoints.get(abs_path_canonical_path_and_base[1]) + * + * can_skip = False # <<<<<<<<<<<<<< + * + * if info.pydev_state == 1: # 1 = 1 + */ + __pyx_v_can_skip = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":921 + * can_skip = False + * + * if info.pydev_state == 1: # 1 = 1 # <<<<<<<<<<<<<< + * # we can skip if: + * # - we have no stop marked + */ + __pyx_t_14 = ((__pyx_v_info->pydev_state == 1) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":926 + * # - we should make a step return/step over and we're not in the current frame + * # - we're stepping into a coroutine context and we're not in that context + * if step_cmd == -1: # <<<<<<<<<<<<<< + * can_skip = True + * + */ + __pyx_t_14 = ((__pyx_v_step_cmd == -1L) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":927 + * # - we're stepping into a coroutine context and we're not in that context + * if step_cmd == -1: + * can_skip = True # <<<<<<<<<<<<<< + * + * elif step_cmd in (108, 109, 159, 160) and not self._is_same_frame(stop_frame, frame): + */ + __pyx_v_can_skip = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":926 + * # - we should make a step return/step over and we're not in the current frame + * # - we're stepping into a coroutine context and we're not in that context + * if step_cmd == -1: # <<<<<<<<<<<<<< + * can_skip = True + * + */ + goto __pyx_L45; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":929 + * can_skip = True + * + * elif step_cmd in (108, 109, 159, 160) and not self._is_same_frame(stop_frame, frame): # <<<<<<<<<<<<<< + * can_skip = True + * + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + case 0x6D: + case 0x9F: + case 0xA0: + __pyx_t_9 = 1; + break; + default: + __pyx_t_9 = 0; + break; + } + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L46_bool_binop_done; + } + __pyx_t_4 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_frame); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 929, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 929, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = ((!__pyx_t_11) != 0); + __pyx_t_14 = __pyx_t_9; + __pyx_L46_bool_binop_done:; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":930 + * + * elif step_cmd in (108, 109, 159, 160) and not self._is_same_frame(stop_frame, frame): + * can_skip = True # <<<<<<<<<<<<<< + * + * elif step_cmd == 128 and ( + */ + __pyx_v_can_skip = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":929 + * can_skip = True + * + * elif step_cmd in (108, 109, 159, 160) and not self._is_same_frame(stop_frame, frame): # <<<<<<<<<<<<<< + * can_skip = True + * + */ + 
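+ /* Summary of the skip decision above (per the pydevd_cython.pyx source quoted in these banner
+  * comments, lines 921-930): while the thread is running (info.pydev_state == 1) this frame can be
+  * skipped when no step command is pending (step_cmd == -1), or when a step over / step return
+  * style command (the 108, 109, 159, 160 values tested above) is active but this is not the frame
+  * in which the step was requested (_is_same_frame(stop_frame, frame) is false).
+  */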
goto __pyx_L45; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":932 + * can_skip = True + * + * elif step_cmd == 128 and ( # <<<<<<<<<<<<<< + * stop_frame is not None and + * stop_frame is not frame and + */ + __pyx_t_9 = ((__pyx_v_step_cmd == 0x80) != 0); + if (__pyx_t_9) { + } else { + __pyx_t_14 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":933 + * + * elif step_cmd == 128 and ( + * stop_frame is not None and # <<<<<<<<<<<<<< + * stop_frame is not frame and + * stop_frame is not frame.f_back and + */ + __pyx_t_9 = (__pyx_v_stop_frame != Py_None); + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L48_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":934 + * elif step_cmd == 128 and ( + * stop_frame is not None and + * stop_frame is not frame and # <<<<<<<<<<<<<< + * stop_frame is not frame.f_back and + * (frame.f_back is None or stop_frame is not frame.f_back.f_back)): + */ + __pyx_t_11 = (__pyx_v_stop_frame != __pyx_v_frame); + __pyx_t_9 = (__pyx_t_11 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_14 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":935 + * stop_frame is not None and + * stop_frame is not frame and + * stop_frame is not frame.f_back and # <<<<<<<<<<<<<< + * (frame.f_back is None or stop_frame is not frame.f_back.f_back)): + * can_skip = True + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 935, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_9 = (__pyx_v_stop_frame != __pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L48_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":936 + * stop_frame is not frame and + * stop_frame is not frame.f_back and + * (frame.f_back is None or stop_frame is not frame.f_back.f_back)): # <<<<<<<<<<<<<< + * can_skip = True + * + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 936, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_11 = (__pyx_t_4 == Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = (__pyx_t_11 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_14 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 936, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 936, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = (__pyx_v_stop_frame != __pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_11 = (__pyx_t_9 != 0); + __pyx_t_14 = __pyx_t_11; + __pyx_L48_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":932 + * can_skip = True + * + * elif step_cmd == 128 and ( # <<<<<<<<<<<<<< + * stop_frame is not None and + * stop_frame is not frame and + */ + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":937 + * stop_frame is not frame.f_back and + * (frame.f_back is None or stop_frame is not frame.f_back.f_back)): + * can_skip = True # <<<<<<<<<<<<<< + * + * elif step_cmd == 144: + */ + __pyx_v_can_skip = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":932 + * can_skip = True + * + * elif step_cmd == 128 and ( # <<<<<<<<<<<<<< + * 
stop_frame is not None and + * stop_frame is not frame and + */ + goto __pyx_L45; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":939 + * can_skip = True + * + * elif step_cmd == 144: # <<<<<<<<<<<<<< + * if ( + * main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + */ + __pyx_t_14 = ((__pyx_v_step_cmd == 0x90) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":941 + * elif step_cmd == 144: + * if ( + * main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) # <<<<<<<<<<<<<< + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) + * ): + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_v_frame, __pyx_t_7, Py_True}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_v_frame, __pyx_t_7, Py_True}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_t_7); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, Py_True); + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 941, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L55_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":942 + * if ( + * 
main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) # <<<<<<<<<<<<<< + * ): + * can_skip = True + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = (__pyx_t_1 == Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = (__pyx_t_11 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_14 = __pyx_t_9; + goto __pyx_L55_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_t_6, __pyx_t_7, Py_True}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_t_6, __pyx_t_7, Py_True}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_5, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_5, __pyx_t_7); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_5, Py_True); + __pyx_t_6 = 0; + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + 
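+ /* step_cmd == 144 branch (quoted .pyx lines 939-944): the frame may also be skipped when
+  * main_debugger.apply_files_filter() reports that this frame's file, and the caller's file when
+  * frame.f_back exists, are excluded by the configured file filters. The C around this comment is
+  * simply Cython's expansion of those two apply_files_filter() calls.
+  */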
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 942, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_14 = __pyx_t_9; + __pyx_L55_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":940 + * + * elif step_cmd == 144: + * if ( # <<<<<<<<<<<<<< + * main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) + */ + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":944 + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) + * ): + * can_skip = True # <<<<<<<<<<<<<< + * + * elif step_cmd == 206: + */ + __pyx_v_can_skip = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":940 + * + * elif step_cmd == 144: + * if ( # <<<<<<<<<<<<<< + * main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + * and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":939 + * can_skip = True + * + * elif step_cmd == 144: # <<<<<<<<<<<<<< + * if ( + * main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + */ + goto __pyx_L45; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":946 + * can_skip = True + * + * elif step_cmd == 206: # <<<<<<<<<<<<<< + * f = frame + * while f is not None: + */ + __pyx_t_14 = ((__pyx_v_step_cmd == 0xCE) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":947 + * + * elif step_cmd == 206: + * f = frame # <<<<<<<<<<<<<< + * while f is not None: + * if self._is_same_frame(stop_frame, f): + */ + __Pyx_INCREF(__pyx_v_frame); + __Pyx_XDECREF_SET(__pyx_v_f, __pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":948 + * elif step_cmd == 206: + * f = frame + * while f is not None: # <<<<<<<<<<<<<< + * if self._is_same_frame(stop_frame, f): + * break + */ + while (1) { + __pyx_t_14 = (__pyx_v_f != Py_None); + __pyx_t_9 = (__pyx_t_14 != 0); + if (!__pyx_t_9) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":949 + * f = frame + * while f is not None: + * if self._is_same_frame(stop_frame, f): # <<<<<<<<<<<<<< + * break + * f = f.f_back + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_f); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 949, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 949, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":950 + * while f is not None: + * if self._is_same_frame(stop_frame, f): + * break # <<<<<<<<<<<<<< + * f = f.f_back + * else: + */ + goto __pyx_L59_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":949 + * f = frame + * while f is not None: + * if self._is_same_frame(stop_frame, f): # <<<<<<<<<<<<<< + * break + * f = f.f_back + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":951 + * if self._is_same_frame(stop_frame, f): + * break + * f = f.f_back # <<<<<<<<<<<<<< + * else: + * can_skip = True + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 951, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_1); + __pyx_t_1 = 0; + } + + /* 
"_pydevd_bundle/pydevd_cython.pyx":953 + * f = f.f_back + * else: + * can_skip = True # <<<<<<<<<<<<<< + * + * if can_skip: + */ + /*else*/ { + __pyx_v_can_skip = 1; + } + __pyx_L59_break:; + + /* "_pydevd_bundle/pydevd_cython.pyx":946 + * can_skip = True + * + * elif step_cmd == 206: # <<<<<<<<<<<<<< + * f = frame + * while f is not None: + */ + } + __pyx_L45:; + + /* "_pydevd_bundle/pydevd_cython.pyx":955 + * can_skip = True + * + * if can_skip: # <<<<<<<<<<<<<< + * if plugin_manager is not None and ( + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + */ + __pyx_t_9 = (__pyx_v_can_skip != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":956 + * + * if can_skip: + * if plugin_manager is not None and ( # <<<<<<<<<<<<<< + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + * can_skip = plugin_manager.can_skip(main_debugger, frame) + */ + __pyx_t_14 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_11 = (__pyx_t_14 != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L63_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":957 + * if can_skip: + * if plugin_manager is not None and ( + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): # <<<<<<<<<<<<<< + * can_skip = plugin_manager.can_skip(main_debugger, frame) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 957, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 957, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L63_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 957, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 957, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = __pyx_t_11; + __pyx_L63_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":956 + * + * if can_skip: + * if plugin_manager is not None and ( # <<<<<<<<<<<<<< + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + * can_skip = plugin_manager.can_skip(main_debugger, frame) + */ + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":958 + * if plugin_manager is not None and ( + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + * can_skip = plugin_manager.can_skip(main_debugger, frame) # <<<<<<<<<<<<<< + * + * if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and self._is_same_frame(stop_frame, frame.f_back): + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_can_skip); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = 
{__pyx_t_3, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_5, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_can_skip = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":956 + * + * if can_skip: + * if plugin_manager is not None and ( # <<<<<<<<<<<<<< + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + * can_skip = plugin_manager.can_skip(main_debugger, frame) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":960 + * can_skip = plugin_manager.can_skip(main_debugger, frame) + * + * if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and self._is_same_frame(stop_frame, frame.f_back): # <<<<<<<<<<<<<< + * # trace function for showing return values after step over + * can_skip = False + */ + __pyx_t_11 = (__pyx_v_can_skip != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L67_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_show_return_values); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 960, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 960, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L67_bool_binop_done; + } + switch (__pyx_v_info->pydev_step_cmd) { + case 0x6C: + case 0x9F: + __pyx_t_11 = 1; + break; + default: + __pyx_t_11 = 0; + break; + } + __pyx_t_14 = (__pyx_t_11 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L67_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 960, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 960, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + 
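+ /* Quoted .pyx lines 955-962: even when can_skip was set, the plugin manager can veto the skip via
+  * plugin_manager.can_skip(main_debugger, frame) when plugin line/exception breakpoints exist, and
+  * can_skip is forced back to False when main_debugger.show_return_values is enabled and a step
+  * over is stopping in the caller frame, keeping the trace function installed so return values can
+  * be shown after the step over.
+  */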
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 960, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = __pyx_t_14; + __pyx_L67_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":962 + * if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and self._is_same_frame(stop_frame, frame.f_back): + * # trace function for showing return values after step over + * can_skip = False # <<<<<<<<<<<<<< + * + * # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint, + */ + __pyx_v_can_skip = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":960 + * can_skip = plugin_manager.can_skip(main_debugger, frame) + * + * if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and self._is_same_frame(stop_frame, frame.f_back): # <<<<<<<<<<<<<< + * # trace function for showing return values after step over + * can_skip = False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":955 + * can_skip = True + * + * if can_skip: # <<<<<<<<<<<<<< + * if plugin_manager is not None and ( + * main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":921 + * can_skip = False + * + * if info.pydev_state == 1: # 1 = 1 # <<<<<<<<<<<<<< + * # we can skip if: + * # - we have no stop marked + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":969 + * # so, that's why the additional checks are there. + * + * if function_breakpoint_on_call_event: # <<<<<<<<<<<<<< + * pass # Do nothing here (just keep on going as we can't skip it). + * + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_function_breakpoint_on_call_event); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 969, __pyx_L4_error) + if (__pyx_t_9) { + goto __pyx_L71; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":972 + * pass # Do nothing here (just keep on going as we can't skip it). 
+ * + * elif not breakpoints_for_file: # <<<<<<<<<<<<<< + * if can_skip: + * if has_exception_breakpoints: + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoints_for_file); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 972, __pyx_L4_error) + __pyx_t_14 = ((!__pyx_t_9) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":973 + * + * elif not breakpoints_for_file: + * if can_skip: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + __pyx_t_14 = (__pyx_v_can_skip != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":974 + * elif not breakpoints_for_file: + * if can_skip: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + __pyx_t_14 = (__pyx_v_has_exception_breakpoints != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":975 + * if can_skip: + * if has_exception_breakpoints: + * return self.trace_exception # <<<<<<<<<<<<<< + * else: + * return None if is_call else NO_FTRACE + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_exception); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 975, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":974 + * elif not breakpoints_for_file: + * if can_skip: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":977 + * return self.trace_exception + * else: + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * else: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_4 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 977, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __pyx_t_1; + __pyx_t_1 = 0; + } + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L3_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":973 + * + * elif not breakpoints_for_file: + * if can_skip: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":972 + * pass # Do nothing here (just keep on going as we can't skip it). + * + * elif not breakpoints_for_file: # <<<<<<<<<<<<<< + * if can_skip: + * if has_exception_breakpoints: + */ + goto __pyx_L71; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":981 + * else: + * # When cached, 0 means we don't have a breakpoint and 1 means we have. + * if can_skip: # <<<<<<<<<<<<<< + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: + */ + /*else*/ { + __pyx_t_14 = (__pyx_v_can_skip != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":982 + * # When cached, 0 means we don't have a breakpoint and 1 means we have. 
+ * if can_skip: + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) # <<<<<<<<<<<<<< + * if breakpoints_in_line_cache == 0: + * return self.trace_dispatch + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 982, __pyx_L4_error) + } + __pyx_t_4 = __Pyx_PyDict_GetItemDefault(__pyx_v_frame_skips_cache, __pyx_v_line_cache_key, __pyx_int_neg_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 982, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_4); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 982, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_breakpoints_in_line_cache = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":983 + * if can_skip: + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __pyx_t_14 = ((__pyx_v_breakpoints_in_line_cache == 0) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":984 + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 984, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":983 + * if can_skip: + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":981 + * else: + * # When cached, 0 means we don't have a breakpoint and 1 means we have. + * if can_skip: # <<<<<<<<<<<<<< + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":986 + * return self.trace_dispatch + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) # <<<<<<<<<<<<<< + * if breakpoints_in_frame_cache != -1: + * # Gotten from cache. + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 986, __pyx_L4_error) + } + __pyx_t_4 = __Pyx_PyDict_GetItemDefault(__pyx_v_frame_skips_cache, __pyx_v_frame_cache_key, __pyx_int_neg_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 986, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_4); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 986, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_breakpoints_in_frame_cache = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":987 + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + * if breakpoints_in_frame_cache != -1: # <<<<<<<<<<<<<< + * # Gotten from cache. + * has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + */ + __pyx_t_14 = ((__pyx_v_breakpoints_in_frame_cache != -1L) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":989 + * if breakpoints_in_frame_cache != -1: + * # Gotten from cache. 
+ * has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_v_has_breakpoint_in_frame = (__pyx_v_breakpoints_in_frame_cache == 1); + + /* "_pydevd_bundle/pydevd_cython.pyx":987 + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + * if breakpoints_in_frame_cache != -1: # <<<<<<<<<<<<<< + * # Gotten from cache. + * has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + */ + goto __pyx_L76; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":992 + * + * else: + * has_breakpoint_in_frame = False # <<<<<<<<<<<<<< + * + * try: + */ + /*else*/ { + __pyx_v_has_breakpoint_in_frame = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":994 + * has_breakpoint_in_frame = False + * + * try: # <<<<<<<<<<<<<< + * func_lines = set() + * for offset_and_lineno in dis.findlinestarts(frame.f_code): + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_18); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":995 + * + * try: + * func_lines = set() # <<<<<<<<<<<<<< + * for offset_and_lineno in dis.findlinestarts(frame.f_code): + * func_lines.add(offset_and_lineno[1]) + */ + __pyx_t_4 = PySet_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 995, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_func_lines = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":996 + * try: + * func_lines = set() + * for offset_and_lineno in dis.findlinestarts(frame.f_code): # <<<<<<<<<<<<<< + * func_lines.add(offset_and_lineno[1]) + * except: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_dis); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_findlinestarts); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_4 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_3, __pyx_t_1) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (likely(PyList_CheckExact(__pyx_t_4)) || PyTuple_CheckExact(__pyx_t_4)) { + __pyx_t_7 = __pyx_t_4; __Pyx_INCREF(__pyx_t_7); __pyx_t_12 = 0; + __pyx_t_13 = NULL; + } else { + __pyx_t_12 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_13 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 996, __pyx_L77_error) + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + for (;;) { + if (likely(!__pyx_t_13)) { + if (likely(PyList_CheckExact(__pyx_t_7))) { + if (__pyx_t_12 >= PyList_GET_SIZE(__pyx_t_7)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_12); __Pyx_INCREF(__pyx_t_4); __pyx_t_12++; if (unlikely(0 < 0)) __PYX_ERR(0, 996, __pyx_L77_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_7, __pyx_t_12); __pyx_t_12++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + if (__pyx_t_12 >= PyTuple_GET_SIZE(__pyx_t_7)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_12); __Pyx_INCREF(__pyx_t_4); __pyx_t_12++; if (unlikely(0 < 0)) __PYX_ERR(0, 996, __pyx_L77_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_7, __pyx_t_12); __pyx_t_12++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 996, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } + } else { + __pyx_t_4 = __pyx_t_13(__pyx_t_7); + if (unlikely(!__pyx_t_4)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 996, __pyx_L77_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_4); + } + __Pyx_XDECREF_SET(__pyx_v_offset_and_lineno, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":997 + * func_lines = set() + * for offset_and_lineno in dis.findlinestarts(frame.f_code): + * func_lines.add(offset_and_lineno[1]) # <<<<<<<<<<<<<< + * except: + * # This is a fallback for implementations where we can't get the function + */ + __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_offset_and_lineno, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 997, __pyx_L77_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_19 = PySet_Add(__pyx_v_func_lines, __pyx_t_4); if (unlikely(__pyx_t_19 == ((int)-1))) __PYX_ERR(0, 997, __pyx_L77_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":996 + * try: + * func_lines = set() + * for offset_and_lineno in dis.findlinestarts(frame.f_code): # <<<<<<<<<<<<<< + * func_lines.add(offset_and_lineno[1]) + * except: + */ + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":994 + * has_breakpoint_in_frame = False + * + * try: # <<<<<<<<<<<<<< + * func_lines = set() + * for offset_and_lineno in dis.findlinestarts(frame.f_code): + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1017 + * break + * else: + * for bp_line in breakpoints_for_file: # iterate on keys # <<<<<<<<<<<<<< + * if bp_line in func_lines: + * has_breakpoint_in_frame = True + */ + /*else:*/ { + __pyx_t_12 = 0; + 
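+ /* Breakpoint-in-frame detection (quoted .pyx lines 994-1020): dis.findlinestarts(frame.f_code)
+  * yields (offset, lineno) pairs, whose line numbers are collected into func_lines; if any key of
+  * breakpoints_for_file is in that set, has_breakpoint_in_frame is set. When findlinestarts is not
+  * usable (the quoted fallback mentions jython), breakpoints are matched by bp.func_name against
+  * the frame's function name instead. The result is then cached in frame_skips_cache as 1 (has a
+  * breakpoint), 0 (none) or -1 (unset default), as described in the quoted comments.
+  */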
if (unlikely(__pyx_v_breakpoints_for_file == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 1017, __pyx_L79_except_error) + } + __pyx_t_4 = __Pyx_dict_iterator(__pyx_v_breakpoints_for_file, 1, ((PyObject *)NULL), (&__pyx_t_20), (&__pyx_t_5)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1017, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + __pyx_t_7 = __pyx_t_4; + __pyx_t_4 = 0; + while (1) { + __pyx_t_10 = __Pyx_dict_iter_next(__pyx_t_7, __pyx_t_20, &__pyx_t_12, &__pyx_t_4, NULL, NULL, __pyx_t_5); + if (unlikely(__pyx_t_10 == 0)) break; + if (unlikely(__pyx_t_10 == -1)) __PYX_ERR(0, 1017, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_10 = __Pyx_PyInt_As_int(__pyx_t_4); if (unlikely((__pyx_t_10 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1017, __pyx_L79_except_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_bp_line = __pyx_t_10; + + /* "_pydevd_bundle/pydevd_cython.pyx":1018 + * else: + * for bp_line in breakpoints_for_file: # iterate on keys + * if bp_line in func_lines: # <<<<<<<<<<<<<< + * has_breakpoint_in_frame = True + * break + */ + __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_bp_line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1018, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_14 = (__Pyx_PySet_ContainsTF(__pyx_t_4, __pyx_v_func_lines, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1018, __pyx_L79_except_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1019 + * for bp_line in breakpoints_for_file: # iterate on keys + * if bp_line in func_lines: + * has_breakpoint_in_frame = True # <<<<<<<<<<<<<< + * break + * + */ + __pyx_v_has_breakpoint_in_frame = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1020 + * if bp_line in func_lines: + * has_breakpoint_in_frame = True + * break # <<<<<<<<<<<<<< + * + * # Cache the value (1 or 0 or -1 for default because of cython). 
+ */ + goto __pyx_L86_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1018 + * else: + * for bp_line in breakpoints_for_file: # iterate on keys + * if bp_line in func_lines: # <<<<<<<<<<<<<< + * has_breakpoint_in_frame = True + * break + */ + } + } + __pyx_L86_break:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_18); __pyx_t_18 = 0; + goto __pyx_L82_try_end; + __pyx_L77_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":998 + * for offset_and_lineno in dis.findlinestarts(frame.f_code): + * func_lines.add(offset_and_lineno[1]) + * except: # <<<<<<<<<<<<<< + * # This is a fallback for implementations where we can't get the function + * # lines -- i.e.: jython (in this case clients need to provide the function + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_4, &__pyx_t_1) < 0) __PYX_ERR(0, 998, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":1005 + * + * # Checks the breakpoint to see if there is a context match in some function. + * curr_func_name = frame.f_code.co_name # <<<<<<<<<<<<<< + * + * # global context is set with an empty name + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1005, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1005, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 1005, __pyx_L79_except_error) + __pyx_v_curr_func_name = ((PyObject*)__pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1008 + * + * # global context is set with an empty name + * if curr_func_name in ('?', '', ''): # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + __Pyx_INCREF(__pyx_v_curr_func_name); + __pyx_t_21 = __pyx_v_curr_func_name; + __pyx_t_14 = (__Pyx_PyString_Equals(__pyx_t_21, __pyx_kp_s__3, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1008, __pyx_L79_except_error) + __pyx_t_11 = (__pyx_t_14 != 0); + if (!__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L91_bool_binop_done; + } + __pyx_t_11 = (__Pyx_PyString_Equals(__pyx_t_21, __pyx_kp_s_module, Py_EQ)); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1008, __pyx_L79_except_error) + __pyx_t_14 = (__pyx_t_11 != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L91_bool_binop_done; + } + __pyx_t_14 = (__Pyx_PyString_Equals(__pyx_t_21, __pyx_kp_s_lambda, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1008, __pyx_L79_except_error) + __pyx_t_11 = (__pyx_t_14 != 0); + __pyx_t_9 = __pyx_t_11; + __pyx_L91_bool_binop_done:; + __Pyx_DECREF(__pyx_t_21); __pyx_t_21 = 0; + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + + /* 
"_pydevd_bundle/pydevd_cython.pyx":1009 + * # global context is set with an empty name + * if curr_func_name in ('?', '', ''): + * curr_func_name = '' # <<<<<<<<<<<<<< + * + * for bp in breakpoints_for_file.values(): + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_DECREF_SET(__pyx_v_curr_func_name, __pyx_kp_s_); + + /* "_pydevd_bundle/pydevd_cython.pyx":1008 + * + * # global context is set with an empty name + * if curr_func_name in ('?', '', ''): # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1011 + * curr_func_name = '' + * + * for bp in breakpoints_for_file.values(): # <<<<<<<<<<<<<< + * # will match either global or some function + * if bp.func_name in ('None', curr_func_name): + */ + if (unlikely(__pyx_v_breakpoints_for_file == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "values"); + __PYX_ERR(0, 1011, __pyx_L79_except_error) + } + __pyx_t_6 = __Pyx_PyDict_Values(__pyx_v_breakpoints_for_file); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_6); + if (likely(PyList_CheckExact(__pyx_t_6)) || PyTuple_CheckExact(__pyx_t_6)) { + __pyx_t_3 = __pyx_t_6; __Pyx_INCREF(__pyx_t_3); __pyx_t_20 = 0; + __pyx_t_13 = NULL; + } else { + __pyx_t_20 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_13 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + for (;;) { + if (likely(!__pyx_t_13)) { + if (likely(PyList_CheckExact(__pyx_t_3))) { + if (__pyx_t_20 >= PyList_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_20); __Pyx_INCREF(__pyx_t_6); __pyx_t_20++; if (unlikely(0 < 0)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_3, __pyx_t_20); __pyx_t_20++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } else { + if (__pyx_t_20 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_20); __Pyx_INCREF(__pyx_t_6); __pyx_t_20++; if (unlikely(0 < 0)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_3, __pyx_t_20); __pyx_t_20++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1011, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } + } else { + __pyx_t_6 = __pyx_t_13(__pyx_t_3); + if (unlikely(!__pyx_t_6)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 1011, __pyx_L79_except_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_6); + } + __Pyx_XDECREF_SET(__pyx_v_bp, __pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1013 + * for bp in breakpoints_for_file.values(): + * # will match either global or some function + * if bp.func_name in ('None', curr_func_name): # <<<<<<<<<<<<<< + * has_breakpoint_in_frame = True + * break + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_bp, __pyx_n_s_func_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1013, __pyx_L79_except_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_t_6, __pyx_n_s_None, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1013, 
__pyx_L79_except_error) + if (!__pyx_t_9) { + } else { + __pyx_t_11 = __pyx_t_9; + goto __pyx_L97_bool_binop_done; + } + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_t_6, __pyx_v_curr_func_name, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1013, __pyx_L79_except_error) + __pyx_t_11 = __pyx_t_9; + __pyx_L97_bool_binop_done:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_9 = (__pyx_t_11 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1014 + * # will match either global or some function + * if bp.func_name in ('None', curr_func_name): + * has_breakpoint_in_frame = True # <<<<<<<<<<<<<< + * break + * else: + */ + __pyx_v_has_breakpoint_in_frame = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1015 + * if bp.func_name in ('None', curr_func_name): + * has_breakpoint_in_frame = True + * break # <<<<<<<<<<<<<< + * else: + * for bp_line in breakpoints_for_file: # iterate on keys + */ + goto __pyx_L95_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1013 + * for bp in breakpoints_for_file.values(): + * # will match either global or some function + * if bp.func_name in ('None', curr_func_name): # <<<<<<<<<<<<<< + * has_breakpoint_in_frame = True + * break + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1011 + * curr_func_name = '' + * + * for bp in breakpoints_for_file.values(): # <<<<<<<<<<<<<< + * # will match either global or some function + * if bp.func_name in ('None', curr_func_name): + */ + } + __pyx_L95_break:; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L78_exception_handled; + } + __pyx_L79_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":994 + * has_breakpoint_in_frame = False + * + * try: # <<<<<<<<<<<<<< + * func_lines = set() + * for offset_and_lineno in dis.findlinestarts(frame.f_code): + */ + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + goto __pyx_L4_error; + __pyx_L78_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + __pyx_L82_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1023 + * + * # Cache the value (1 or 0 or -1 for default because of cython). + * if has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * frame_skips_cache[frame_cache_key] = 1 + * else: + */ + __pyx_t_9 = (__pyx_v_has_breakpoint_in_frame != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1024 + * # Cache the value (1 or 0 or -1 for default because of cython). + * if has_breakpoint_in_frame: + * frame_skips_cache[frame_cache_key] = 1 # <<<<<<<<<<<<<< + * else: + * frame_skips_cache[frame_cache_key] = 0 + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1024, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_frame_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1024, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1023 + * + * # Cache the value (1 or 0 or -1 for default because of cython). 
+ * if has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * frame_skips_cache[frame_cache_key] = 1 + * else: + */ + goto __pyx_L99; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1026 + * frame_skips_cache[frame_cache_key] = 1 + * else: + * frame_skips_cache[frame_cache_key] = 0 # <<<<<<<<<<<<<< + * + * if can_skip and not has_breakpoint_in_frame: + */ + /*else*/ { + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1026, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_frame_cache_key, __pyx_int_0) < 0)) __PYX_ERR(0, 1026, __pyx_L4_error) + } + __pyx_L99:; + } + __pyx_L76:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1028 + * frame_skips_cache[frame_cache_key] = 0 + * + * if can_skip and not has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + __pyx_t_11 = (__pyx_v_can_skip != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L101_bool_binop_done; + } + __pyx_t_11 = ((!(__pyx_v_has_breakpoint_in_frame != 0)) != 0); + __pyx_t_9 = __pyx_t_11; + __pyx_L101_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1029 + * + * if can_skip and not has_breakpoint_in_frame: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + __pyx_t_9 = (__pyx_v_has_exception_breakpoints != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1030 + * if can_skip and not has_breakpoint_in_frame: + * if has_exception_breakpoints: + * return self.trace_exception # <<<<<<<<<<<<<< + * else: + * return None if is_call else NO_FTRACE + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1030, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1029 + * + * if can_skip and not has_breakpoint_in_frame: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1032 + * return self.trace_exception + * else: + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * # We may have hit a breakpoint or we are already in step mode. 
Either way, let's check what we should do in this frame + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1032, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L3_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1028 + * frame_skips_cache[frame_cache_key] = 0 + * + * if can_skip and not has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + } + } + __pyx_L71:; + + /* "_pydevd_bundle/pydevd_cython.pyx":916 + * return self.trace_dispatch + * + * if not is_exception_event: # <<<<<<<<<<<<<< + * breakpoints_for_file = main_debugger.breakpoints.get(abs_path_canonical_path_and_base[1]) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1037 + * # if DEBUG: print('NOT skipped: %s %s %s %s' % (frame.f_lineno, frame.f_code.co_name, event, frame.__class__.__name__)) + * + * try: # <<<<<<<<<<<<<< + * flag = False + * # return is not taken into account for breakpoint hit because we'd have a double-hit in this case + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_18, &__pyx_t_17, &__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_16); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1038 + * + * try: + * flag = False # <<<<<<<<<<<<<< + * # return is not taken into account for breakpoint hit because we'd have a double-hit in this case + * # (one for the line and the other for the return). + */ + __Pyx_INCREF(Py_False); + __pyx_v_flag = Py_False; + + /* "_pydevd_bundle/pydevd_cython.pyx":1042 + * # (one for the line and the other for the return). 
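/*
 * The section above caches whether this frame contains any breakpoint
 * (frame_skips_cache gets 1 or 0; -1 is the unset default) and, when nothing can
 * stop here, either keeps tracing only for exception breakpoints or drops the
 * local trace function.  A simplified pure-Python sketch of that decision
 * (NO_FTRACE stands in for pydevd's "stop tracing" sentinel; not the exact source):
 *
 *     def decide_frame_tracing(frame_skips_cache, frame_cache_key, can_skip,
 *                              has_breakpoint_in_frame, has_exception_breakpoints,
 *                              is_call, trace_exception, trace_dispatch, NO_FTRACE=None):
 *         frame_skips_cache[frame_cache_key] = 1 if has_breakpoint_in_frame else 0
 *         if can_skip and not has_breakpoint_in_frame:
 *             if has_exception_breakpoints:
 *                 return trace_exception             # still trace, but only for exceptions
 *             return None if is_call else NO_FTRACE  # nothing can stop in this frame
 *         return trace_dispatch                      # keep full tracing for this frame
 */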
+ * + * stop_info = {} # <<<<<<<<<<<<<< + * breakpoint = None + * exist_result = False + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1042, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_stop_info = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1043 + * + * stop_info = {} + * breakpoint = None # <<<<<<<<<<<<<< + * exist_result = False + * stop = False + */ + __Pyx_INCREF(Py_None); + __pyx_v_breakpoint = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1044 + * stop_info = {} + * breakpoint = None + * exist_result = False # <<<<<<<<<<<<<< + * stop = False + * stop_reason = 111 + */ + __pyx_v_exist_result = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1045 + * breakpoint = None + * exist_result = False + * stop = False # <<<<<<<<<<<<<< + * stop_reason = 111 + * bp_type = None + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1046 + * exist_result = False + * stop = False + * stop_reason = 111 # <<<<<<<<<<<<<< + * bp_type = None + * + */ + __Pyx_INCREF(__pyx_int_111); + __pyx_v_stop_reason = __pyx_int_111; + + /* "_pydevd_bundle/pydevd_cython.pyx":1047 + * stop = False + * stop_reason = 111 + * bp_type = None # <<<<<<<<<<<<<< + * + * if function_breakpoint_on_call_event: + */ + __Pyx_INCREF(Py_None); + __pyx_v_bp_type = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1049 + * bp_type = None + * + * if function_breakpoint_on_call_event: # <<<<<<<<<<<<<< + * breakpoint = function_breakpoint_on_call_event + * stop = True + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_function_breakpoint_on_call_event); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1049, __pyx_L104_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1050 + * + * if function_breakpoint_on_call_event: + * breakpoint = function_breakpoint_on_call_event # <<<<<<<<<<<<<< + * stop = True + * new_frame = frame + */ + __Pyx_INCREF(__pyx_v_function_breakpoint_on_call_event); + __Pyx_DECREF_SET(__pyx_v_breakpoint, __pyx_v_function_breakpoint_on_call_event); + + /* "_pydevd_bundle/pydevd_cython.pyx":1051 + * if function_breakpoint_on_call_event: + * breakpoint = function_breakpoint_on_call_event + * stop = True # <<<<<<<<<<<<<< + * new_frame = frame + * stop_reason = CMD_SET_FUNCTION_BREAK + */ + __pyx_v_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1052 + * breakpoint = function_breakpoint_on_call_event + * stop = True + * new_frame = frame # <<<<<<<<<<<<<< + * stop_reason = CMD_SET_FUNCTION_BREAK + * + */ + __Pyx_INCREF(__pyx_v_frame); + __pyx_v_new_frame = __pyx_v_frame; + + /* "_pydevd_bundle/pydevd_cython.pyx":1053 + * stop = True + * new_frame = frame + * stop_reason = CMD_SET_FUNCTION_BREAK # <<<<<<<<<<<<<< + * + * elif is_line and info.pydev_state != 2 and breakpoints_for_file is not None and line in breakpoints_for_file: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_CMD_SET_FUNCTION_BREAK); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1053, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_stop_reason, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1049 + * bp_type = None + * + * if function_breakpoint_on_call_event: # <<<<<<<<<<<<<< + * breakpoint = function_breakpoint_on_call_event + * stop = True + */ + goto __pyx_L110; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1055 + * stop_reason = CMD_SET_FUNCTION_BREAK + * + * elif is_line and info.pydev_state != 2 and breakpoints_for_file is not None and line in breakpoints_for_file: # <<<<<<<<<<<<<< + * breakpoint 
= breakpoints_for_file[line] + * new_frame = frame + */ + __pyx_t_11 = (__pyx_v_is_line != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L111_bool_binop_done; + } + __pyx_t_11 = ((__pyx_v_info->pydev_state != 2) != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L111_bool_binop_done; + } + if (unlikely(!__pyx_v_breakpoints_for_file)) { __Pyx_RaiseUnboundLocalError("breakpoints_for_file"); __PYX_ERR(0, 1055, __pyx_L104_error) } + __pyx_t_11 = (__pyx_v_breakpoints_for_file != ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_11 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L111_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1055, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(!__pyx_v_breakpoints_for_file)) { __Pyx_RaiseUnboundLocalError("breakpoints_for_file"); __PYX_ERR(0, 1055, __pyx_L104_error) } + if (unlikely(__pyx_v_breakpoints_for_file == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 1055, __pyx_L104_error) + } + __pyx_t_14 = (__Pyx_PyDict_ContainsTF(__pyx_t_1, __pyx_v_breakpoints_for_file, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1055, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_11 = (__pyx_t_14 != 0); + __pyx_t_9 = __pyx_t_11; + __pyx_L111_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1056 + * + * elif is_line and info.pydev_state != 2 and breakpoints_for_file is not None and line in breakpoints_for_file: + * breakpoint = breakpoints_for_file[line] # <<<<<<<<<<<<<< + * new_frame = frame + * stop = True + */ + if (unlikely(!__pyx_v_breakpoints_for_file)) { __Pyx_RaiseUnboundLocalError("breakpoints_for_file"); __PYX_ERR(0, 1056, __pyx_L104_error) } + if (unlikely(__pyx_v_breakpoints_for_file == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1056, __pyx_L104_error) + } + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1056, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyDict_GetItem(__pyx_v_breakpoints_for_file, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1056, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_breakpoint, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1057 + * elif is_line and info.pydev_state != 2 and breakpoints_for_file is not None and line in breakpoints_for_file: + * breakpoint = breakpoints_for_file[line] + * new_frame = frame # <<<<<<<<<<<<<< + * stop = True + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): + */ + __Pyx_INCREF(__pyx_v_frame); + __pyx_v_new_frame = __pyx_v_frame; + + /* "_pydevd_bundle/pydevd_cython.pyx":1058 + * breakpoint = breakpoints_for_file[line] + * new_frame = frame + * stop = True # <<<<<<<<<<<<<< + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + */ + __pyx_v_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1059 + * new_frame = frame + * stop = True + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): # <<<<<<<<<<<<<< + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif 
plugin_manager is not None and main_debugger.has_plugin_line_breaks: + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + case 0x9F: + __pyx_t_11 = 1; + break; + default: + __pyx_t_11 = 0; + break; + } + __pyx_t_14 = (__pyx_t_11 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L116_bool_binop_done; + } + __pyx_t_4 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_frame); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1059, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1059, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L116_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_is_line != 0); + __pyx_t_9 = __pyx_t_14; + __pyx_L116_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1060 + * stop = True + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) # <<<<<<<<<<<<<< + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1059 + * new_frame = frame + * stop = True + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): # <<<<<<<<<<<<<< + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1055 + * stop_reason = CMD_SET_FUNCTION_BREAK + * + * elif is_line and info.pydev_state != 2 and breakpoints_for_file is not None and line in breakpoints_for_file: # <<<<<<<<<<<<<< + * breakpoint = breakpoints_for_file[line] + * new_frame = frame + */ + goto __pyx_L110; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1061 + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: + */ + __pyx_t_14 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_11 = (__pyx_t_14 != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L119_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1061, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1061, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = __pyx_t_11; + __pyx_L119_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1062 + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) # <<<<<<<<<<<<<< + * if result: + * 
exist_result = True + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_get_breakpoint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1062, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[6] = {__pyx_t_7, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 5+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1062, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[6] = {__pyx_t_7, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 5+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1062, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_3 = PyTuple_New(5+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1062, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_5, ((PyObject *)__pyx_v_self)); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_5, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_3, 4+__pyx_t_5, __pyx_v_self->_args); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1062, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1063 + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: # <<<<<<<<<<<<<< + * exist_result = True + * flag, breakpoint, new_frame, bp_type = result + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1063, __pyx_L104_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1064 + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: + * exist_result = True # <<<<<<<<<<<<<< + * flag, breakpoint, new_frame, bp_type = result + * + */ + __pyx_v_exist_result = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1065 + * if result: + * exist_result = True + * flag, breakpoint, new_frame, 
bp_type = result # <<<<<<<<<<<<<< + * + * if breakpoint: + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 4)) { + if (size > 4) __Pyx_RaiseTooManyValuesError(4); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1065, __pyx_L104_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 2); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 3); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_1 = PyList_GET_ITEM(sequence, 1); + __pyx_t_3 = PyList_GET_ITEM(sequence, 2); + __pyx_t_7 = PyList_GET_ITEM(sequence, 3); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + #else + { + Py_ssize_t i; + PyObject** temps[4] = {&__pyx_t_4,&__pyx_t_1,&__pyx_t_3,&__pyx_t_7}; + for (i=0; i < 4; i++) { + PyObject* item = PySequence_ITEM(sequence, i); if (unlikely(!item)) __PYX_ERR(0, 1065, __pyx_L104_error) + __Pyx_GOTREF(item); + *(temps[i]) = item; + } + } + #endif + } else { + Py_ssize_t index = -1; + PyObject** temps[4] = {&__pyx_t_4,&__pyx_t_1,&__pyx_t_3,&__pyx_t_7}; + __pyx_t_6 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1065, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_15 = Py_TYPE(__pyx_t_6)->tp_iternext; + for (index=0; index < 4; index++) { + PyObject* item = __pyx_t_15(__pyx_t_6); if (unlikely(!item)) goto __pyx_L122_unpacking_failed; + __Pyx_GOTREF(item); + *(temps[index]) = item; + } + if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_6), 4) < 0) __PYX_ERR(0, 1065, __pyx_L104_error) + __pyx_t_15 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L123_unpacking_done; + __pyx_L122_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_15 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 1065, __pyx_L104_error) + __pyx_L123_unpacking_done:; + } + __Pyx_DECREF_SET(__pyx_v_flag, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_breakpoint, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_new_frame = __pyx_t_3; + __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_bp_type, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1063 + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: # <<<<<<<<<<<<<< + * exist_result = True + * flag, breakpoint, new_frame, bp_type = result + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1061 + * if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): + * stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: + */ + } + __pyx_L110:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1067 + * flag, breakpoint, new_frame, bp_type = result + * + * if breakpoint: # <<<<<<<<<<<<<< + * # ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + */ + __pyx_t_9 = 
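/*
 * The block above works out whether a breakpoint was actually hit: a function
 * breakpoint fires on the 'call' event, a plain line breakpoint fires when the
 * current line is a key of breakpoints_for_file, and plugin breakpoints are
 * resolved through plugin_manager.get_breakpoint().  A pending step-over on the
 * same frame suppresses the stop, because the stepping logic handles it later.
 * A simplified sketch (plugin breakpoints omitted; helper names are illustrative):
 *
 *     def find_hit_breakpoint(is_line, line, breakpoints_for_file,
 *                             function_breakpoint=None, step_over_pending=False):
 *         if function_breakpoint is not None:        # breakpoint set on the function itself
 *             return function_breakpoint, True
 *         if is_line and breakpoints_for_file and line in breakpoints_for_file:
 *             return breakpoints_for_file[line], not step_over_pending
 *         return None, False                         # no breakpoint for this event
 */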
__Pyx_PyObject_IsTrue(__pyx_v_breakpoint); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1067, __pyx_L104_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1070 + * # ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + * if breakpoint.expression is not None: # <<<<<<<<<<<<<< + * main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_expression); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1070, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = (__pyx_t_7 != Py_None); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_11 = (__pyx_t_9 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1071 + * # lets do the conditional stuff here + * if breakpoint.expression is not None: + * main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) # <<<<<<<<<<<<<< + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + * cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_handle_breakpoint_expression); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1071, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(!__pyx_v_new_frame)) { __Pyx_RaiseUnboundLocalError("new_frame"); __PYX_ERR(0, 1071, __pyx_L104_error) } + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_breakpoint, ((PyObject *)__pyx_v_info), __pyx_v_new_frame}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1071, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_breakpoint, ((PyObject *)__pyx_v_info), __pyx_v_new_frame}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1071, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_4 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1071, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_breakpoint); + __Pyx_GIVEREF(__pyx_v_breakpoint); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_breakpoint); + __Pyx_INCREF(((PyObject *)__pyx_v_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_info)); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, ((PyObject *)__pyx_v_info)); + __Pyx_INCREF(__pyx_v_new_frame); + __Pyx_GIVEREF(__pyx_v_new_frame); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_new_frame); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1071, 
__pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1072 + * if breakpoint.expression is not None: + * main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: # <<<<<<<<<<<<<< + * cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') + * main_debugger.writer.add_command(cmd) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_is_logpoint); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1072, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1072, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_9) { + } else { + __pyx_t_11 = __pyx_t_9; + goto __pyx_L127_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_info->pydev_message != ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L127_bool_binop_done; + } + __pyx_t_7 = __pyx_v_info->pydev_message; + __Pyx_INCREF(__pyx_t_7); + __pyx_t_20 = PyObject_Length(__pyx_t_7); if (unlikely(__pyx_t_20 == ((Py_ssize_t)-1))) __PYX_ERR(0, 1072, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_14 = ((__pyx_t_20 > 0) != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L127_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1073 + * main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + * cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') # <<<<<<<<<<<<<< + * main_debugger.writer.add_command(cmd) + * + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_cmd_factory); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_make_io_message); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_os); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_linesep); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyNumber_Add(__pyx_v_info->pydev_message, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_t_3, __pyx_kp_s_1}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_1); 
__pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_t_3, __pyx_kp_s_1}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_3); + __Pyx_INCREF(__pyx_kp_s_1); + __Pyx_GIVEREF(__pyx_kp_s_1); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_kp_s_1); + __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1073, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_cmd = __pyx_t_7; + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1074 + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + * cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') + * main_debugger.writer.add_command(cmd) # <<<<<<<<<<<<<< + * + * if stop or exist_result: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_writer); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1074, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_add_command); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1074, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_7 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_4, __pyx_v_cmd) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_v_cmd); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1074, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1072 + * if breakpoint.expression is not None: + * main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: # <<<<<<<<<<<<<< + * cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') + * main_debugger.writer.add_command(cmd) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1070 + * # ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + * if breakpoint.expression is not None: # <<<<<<<<<<<<<< + * main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + * if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1076 + * main_debugger.writer.add_command(cmd) + * + * if stop or exist_result: # <<<<<<<<<<<<<< + * eval_result = False + * if breakpoint.has_condition: + */ + __pyx_t_14 = (__pyx_v_stop != 0); + if (!__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L131_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_exist_result != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L131_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1077 + * + * if stop or exist_result: + * eval_result = False # <<<<<<<<<<<<<< + * if breakpoint.has_condition: + * eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + */ + __Pyx_INCREF(Py_False); + __pyx_v_eval_result = Py_False; + + /* "_pydevd_bundle/pydevd_cython.pyx":1078 + * if stop or exist_result: + * eval_result = False + * if breakpoint.has_condition: # <<<<<<<<<<<<<< + * eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_has_condition); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1078, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1078, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1079 + * eval_result = False + * if breakpoint.has_condition: + * eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) # <<<<<<<<<<<<<< + * + * if breakpoint.has_condition: + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_handle_breakpoint_condition); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1079, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + if (unlikely(!__pyx_v_new_frame)) { __Pyx_RaiseUnboundLocalError("new_frame"); __PYX_ERR(0, 1079, __pyx_L104_error) } + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, 
((PyObject *)__pyx_v_info), __pyx_v_breakpoint, __pyx_v_new_frame}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1079, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, ((PyObject *)__pyx_v_info), __pyx_v_breakpoint, __pyx_v_new_frame}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1079, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_3 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1079, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(((PyObject *)__pyx_v_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_info)); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_5, ((PyObject *)__pyx_v_info)); + __Pyx_INCREF(__pyx_v_breakpoint); + __Pyx_GIVEREF(__pyx_v_breakpoint); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_5, __pyx_v_breakpoint); + __Pyx_INCREF(__pyx_v_new_frame); + __Pyx_GIVEREF(__pyx_v_new_frame); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_5, __pyx_v_new_frame); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_3, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1079, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF_SET(__pyx_v_eval_result, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1078 + * if stop or exist_result: + * eval_result = False + * if breakpoint.has_condition: # <<<<<<<<<<<<<< + * eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1081 + * eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + * + * if breakpoint.has_condition: # <<<<<<<<<<<<<< + * if not eval_result: + * stop = False + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_has_condition); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1081, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1081, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1082 + * + * if breakpoint.has_condition: + * if not eval_result: # <<<<<<<<<<<<<< + * stop = False + * elif breakpoint.is_logpoint: + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_eval_result); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1082, __pyx_L104_error) + __pyx_t_14 = ((!__pyx_t_11) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1083 + * if breakpoint.has_condition: + * if not eval_result: + * stop = False # <<<<<<<<<<<<<< + * elif breakpoint.is_logpoint: + * stop = False + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1082 + * + * if breakpoint.has_condition: + * if not eval_result: # <<<<<<<<<<<<<< + * stop = False + * elif breakpoint.is_logpoint: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1081 + * eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + * + * if breakpoint.has_condition: # <<<<<<<<<<<<<< + * if not eval_result: + * stop = False + 
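/*
 * Once a breakpoint (or a plugin result) has been found, the surrounding code
 * applies its condition and logpoint semantics: the breakpoint expression is
 * handled first, a logpoint only writes its message to the IDE instead of
 * pausing, and a failing condition cancels the stop.  A condensed pure-Python
 * sketch (the callables stand in for main_debugger.handle_breakpoint_* and the
 * command writer; details such as new_frame handling are omitted):
 *
 *     import os
 *
 *     def apply_condition_and_logpoint(bp, stop, info, evaluate_expression,
 *                                      evaluate_condition, emit_io):
 *         if bp.expression is not None:
 *             evaluate_expression(bp, info)
 *             if bp.is_logpoint and info.pydev_message:
 *                 emit_io(info.pydev_message + os.linesep)   # print, don't pause
 *         eval_result = evaluate_condition(bp, info) if bp.has_condition else False
 *         if bp.has_condition:
 *             if not eval_result:
 *                 stop = False
 *         elif bp.is_logpoint:
 *             stop = False
 *         return stop
 */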
*/ + goto __pyx_L134; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1084 + * if not eval_result: + * stop = False + * elif breakpoint.is_logpoint: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_is_logpoint); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1084, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1084, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1085 + * stop = False + * elif breakpoint.is_logpoint: + * stop = False # <<<<<<<<<<<<<< + * + * if is_call and (frame.f_code.co_name in ('', '') or (line == 1 and frame.f_code.co_name.startswith('', '') or (line == 1 and frame.f_code.co_name.startswith('. + * + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * if main_debugger.show_return_values: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1099, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L108_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1087 + * stop = False + * + * if is_call and (frame.f_code.co_name in ('', '') or (line == 1 and frame.f_code.co_name.startswith('pydev_step_cmd) { + case 0x6C: + case 0x9F: + case 0x80: + __pyx_t_9 = 1; + break; + default: + __pyx_t_9 = 0; + break; + } + __pyx_t_11 = (__pyx_t_9 != 0); + if (!__pyx_t_11) { + goto __pyx_L147_next_or; + } else { + } + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1103, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1103, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1103, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (!__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L145_bool_binop_done; + } + __pyx_L147_next_or:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1104 + * if is_return and ( + * (info.pydev_step_cmd in (108, 159, 128) and (self._is_same_frame(stop_frame, frame.f_back))) or + * (info.pydev_step_cmd in (109, 160) and (self._is_same_frame(stop_frame, frame))) or # <<<<<<<<<<<<<< + * (info.pydev_step_cmd in (107, 206)) or + * ( + */ + switch (__pyx_v_info->pydev_step_cmd) { + case 0x6D: + case 0xA0: + __pyx_t_11 = 1; + break; + default: + __pyx_t_11 = 0; + break; + } + __pyx_t_9 = (__pyx_t_11 != 0); + if (!__pyx_t_9) { + goto __pyx_L149_next_or; + } else { + } + __pyx_t_7 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1104, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1104, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (!__pyx_t_9) { + } else { + __pyx_t_14 = __pyx_t_9; + goto __pyx_L145_bool_binop_done; + } + __pyx_L149_next_or:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1105 + * (info.pydev_step_cmd in (108, 159, 
128) and (self._is_same_frame(stop_frame, frame.f_back))) or + * (info.pydev_step_cmd in (109, 160) and (self._is_same_frame(stop_frame, frame))) or + * (info.pydev_step_cmd in (107, 206)) or # <<<<<<<<<<<<<< + * ( + * info.pydev_step_cmd == 144 + */ + switch (__pyx_v_info->pydev_step_cmd) { + case 0x6B: + case 0xCE: + __pyx_t_9 = 1; + break; + default: + __pyx_t_9 = 0; + break; + } + __pyx_t_11 = (__pyx_t_9 != 0); + if (!__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L145_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1107 + * (info.pydev_step_cmd in (107, 206)) or + * ( + * info.pydev_step_cmd == 144 # <<<<<<<<<<<<<< + * and frame.f_back is not None + * and not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True) + */ + __pyx_t_11 = ((__pyx_v_info->pydev_step_cmd == 0x90) != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L145_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1108 + * ( + * info.pydev_step_cmd == 144 + * and frame.f_back is not None # <<<<<<<<<<<<<< + * and not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True) + * ) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1108, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_11 = (__pyx_t_7 != Py_None); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_9 = (__pyx_t_11 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_14 = __pyx_t_9; + goto __pyx_L145_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1109 + * info.pydev_step_cmd == 144 + * and frame.f_back is not None + * and not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True) # <<<<<<<<<<<<<< + * ) + * ): + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_t_3, __pyx_t_4, Py_True}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if 
CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_t_3, __pyx_t_4, Py_True}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_5, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_5, __pyx_t_4); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_5, Py_True); + __pyx_t_3 = 0; + __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1109, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_11 = ((!__pyx_t_9) != 0); + __pyx_t_14 = __pyx_t_11; + __pyx_L145_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1102 + * + * if main_debugger.show_return_values: + * if is_return and ( # <<<<<<<<<<<<<< + * (info.pydev_step_cmd in (108, 159, 128) and (self._is_same_frame(stop_frame, frame.f_back))) or + * (info.pydev_step_cmd in (109, 160) and (self._is_same_frame(stop_frame, frame))) or + */ + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1112 + * ) + * ): + * self._show_return_values(frame, arg) # <<<<<<<<<<<<<< + * + * elif main_debugger.remove_return_values_flag: + */ + __pyx_t_7 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_show_return_values(__pyx_v_self, __pyx_v_frame, __pyx_v_arg); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1112, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1102 + * + * if main_debugger.show_return_values: + * if is_return and ( # <<<<<<<<<<<<<< + * (info.pydev_step_cmd in (108, 159, 128) and (self._is_same_frame(stop_frame, frame.f_back))) or + * (info.pydev_step_cmd in (109, 160) and (self._is_same_frame(stop_frame, frame))) or + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1101 + * return self.trace_dispatch + * + * if main_debugger.show_return_values: # <<<<<<<<<<<<<< + * if is_return and ( + * (info.pydev_step_cmd in (108, 159, 128) and (self._is_same_frame(stop_frame, frame.f_back))) or + */ + goto __pyx_L143; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1114 + * self._show_return_values(frame, arg) + * + * elif main_debugger.remove_return_values_flag: # <<<<<<<<<<<<<< + * try: + * self._remove_return_values(main_debugger, frame) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_remove_return_values_flag); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1114, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1114, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_14) { + + 
/* "_pydevd_bundle/pydevd_cython.pyx":1115 + * + * elif main_debugger.remove_return_values_flag: + * try: # <<<<<<<<<<<<<< + * self._remove_return_values(main_debugger, frame) + * finally: + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1116 + * elif main_debugger.remove_return_values_flag: + * try: + * self._remove_return_values(main_debugger, frame) # <<<<<<<<<<<<<< + * finally: + * main_debugger.remove_return_values_flag = False + */ + __pyx_t_7 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_remove_return_values(__pyx_v_self, __pyx_v_main_debugger, __pyx_v_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1116, __pyx_L155_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1118 + * self._remove_return_values(main_debugger, frame) + * finally: + * main_debugger.remove_return_values_flag = False # <<<<<<<<<<<<<< + * + * if stop: + */ + /*finally:*/ { + /*normal exit:*/{ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_main_debugger, __pyx_n_s_remove_return_values_flag, Py_False) < 0) __PYX_ERR(0, 1118, __pyx_L104_error) + goto __pyx_L156; + } + __pyx_L155_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; __pyx_t_27 = 0; __pyx_t_28 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_26, &__pyx_t_27, &__pyx_t_28); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_23, &__pyx_t_24, &__pyx_t_25) < 0)) __Pyx_ErrFetch(&__pyx_t_23, &__pyx_t_24, &__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_26); + __Pyx_XGOTREF(__pyx_t_27); + __Pyx_XGOTREF(__pyx_t_28); + __pyx_t_5 = __pyx_lineno; __pyx_t_10 = __pyx_clineno; __pyx_t_22 = __pyx_filename; + { + if (__Pyx_PyObject_SetAttrStr(__pyx_v_main_debugger, __pyx_n_s_remove_return_values_flag, Py_False) < 0) __PYX_ERR(0, 1118, __pyx_L158_error) + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_XGIVEREF(__pyx_t_27); + __Pyx_XGIVEREF(__pyx_t_28); + __Pyx_ExceptionReset(__pyx_t_26, __pyx_t_27, __pyx_t_28); + } + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_ErrRestore(__pyx_t_23, __pyx_t_24, __pyx_t_25); + __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; __pyx_t_27 = 0; __pyx_t_28 = 0; + __pyx_lineno = __pyx_t_5; __pyx_clineno = __pyx_t_10; __pyx_filename = __pyx_t_22; + goto __pyx_L104_error; + __pyx_L158_error:; + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_XGIVEREF(__pyx_t_27); + __Pyx_XGIVEREF(__pyx_t_28); + __Pyx_ExceptionReset(__pyx_t_26, __pyx_t_27, __pyx_t_28); + } + __Pyx_XDECREF(__pyx_t_23); __pyx_t_23 = 0; + __Pyx_XDECREF(__pyx_t_24); __pyx_t_24 = 0; + __Pyx_XDECREF(__pyx_t_25); __pyx_t_25 = 0; + __pyx_t_26 = 0; __pyx_t_27 = 0; __pyx_t_28 = 0; + goto __pyx_L104_error; + } + __pyx_L156:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1114 + * self._show_return_values(frame, arg) + * + * elif main_debugger.remove_return_values_flag: # <<<<<<<<<<<<<< + * try: + * 
self._remove_return_values(main_debugger, frame) + */ + } + __pyx_L143:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1120 + * main_debugger.remove_return_values_flag = False + * + * if stop: # <<<<<<<<<<<<<< + * self.set_suspend( + * thread, + */ + __pyx_t_14 = (__pyx_v_stop != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1121 + * + * if stop: + * self.set_suspend( # <<<<<<<<<<<<<< + * thread, + * stop_reason, + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1121, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":1123 + * self.set_suspend( + * thread, + * stop_reason, # <<<<<<<<<<<<<< + * suspend_other_threads=breakpoint and breakpoint.suspend_policy == "ALL", + * ) + */ + __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1121, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_stop_reason); + __Pyx_GIVEREF(__pyx_v_stop_reason); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_v_stop_reason); + + /* "_pydevd_bundle/pydevd_cython.pyx":1124 + * thread, + * stop_reason, + * suspend_other_threads=breakpoint and breakpoint.suspend_policy == "ALL", # <<<<<<<<<<<<<< + * ) + * + */ + __pyx_t_8 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1124, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1124, __pyx_L104_error) + if (__pyx_t_14) { + } else { + __Pyx_INCREF(__pyx_v_breakpoint); + __pyx_t_4 = __pyx_v_breakpoint; + goto __pyx_L160_bool_binop_done; + } + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_suspend_policy); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1124, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_n_s_ALL, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1124, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_4 = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_L160_bool_binop_done:; + if (PyDict_SetItem(__pyx_t_8, __pyx_n_s_suspend_other_threads, __pyx_t_4) < 0) __PYX_ERR(0, 1124, __pyx_L104_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1121 + * + * if stop: + * self.set_suspend( # <<<<<<<<<<<<<< + * thread, + * stop_reason, + */ + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_6, __pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1121, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1120 + * main_debugger.remove_return_values_flag = False + * + * if stop: # <<<<<<<<<<<<<< + * self.set_suspend( + * thread, + */ + goto __pyx_L159; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1127 + * ) + * + * elif flag and plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_flag); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1127, __pyx_L104_error) + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L162_bool_binop_done; + } + __pyx_t_11 = 
(__pyx_v_plugin_manager != Py_None); + __pyx_t_9 = (__pyx_t_11 != 0); + __pyx_t_14 = __pyx_t_9; + __pyx_L162_bool_binop_done:; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1128 + * + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) # <<<<<<<<<<<<<< + * if result: + * frame = result + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_suspend); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1128, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[5] = {__pyx_t_6, __pyx_v_main_debugger, __pyx_v_thread, __pyx_v_frame, __pyx_v_bp_type}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1128, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[5] = {__pyx_t_6, __pyx_v_main_debugger, __pyx_v_thread, __pyx_v_frame, __pyx_v_bp_type}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1128, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_7 = PyTuple_New(4+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1128, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_10, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_10, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_10, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_bp_type); + __Pyx_GIVEREF(__pyx_v_bp_type); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_10, __pyx_v_bp_type); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_7, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1128, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1129 + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: # <<<<<<<<<<<<<< + * frame = result + * + */ + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1129, __pyx_L104_error) + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1130 + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: + * frame = result # <<<<<<<<<<<<<< + * + * # if thread has a suspend flag, we suspend with a busy wait + */ + __Pyx_INCREF(__pyx_v_result); + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_v_result); + + /* 
"_pydevd_bundle/pydevd_cython.pyx":1129 + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: # <<<<<<<<<<<<<< + * frame = result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1127 + * ) + * + * elif flag and plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: + */ + } + __pyx_L159:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1133 + * + * # if thread has a suspend flag, we suspend with a busy wait + * if info.pydev_state == 2: # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * return self.trace_dispatch + */ + __pyx_t_14 = ((__pyx_v_info->pydev_state == 2) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1134 + * # if thread has a suspend flag, we suspend with a busy wait + * if info.pydev_state == 2: + * self.do_wait_suspend(thread, frame, event, arg) # <<<<<<<<<<<<<< + * return self.trace_dispatch + * else: + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1134, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1134, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1134, __pyx_L104_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_6 = PyTuple_New(4+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1134, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_10, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_10, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_10, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_6, 3+__pyx_t_10, __pyx_v_arg); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1134, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1135 + * if info.pydev_state == 2: + * self.do_wait_suspend(thread, frame, event, arg) + * 
return self.trace_dispatch # <<<<<<<<<<<<<< + * else: + * if not breakpoint and is_line: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1135, __pyx_L104_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L108_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1133 + * + * # if thread has a suspend flag, we suspend with a busy wait + * if info.pydev_state == 2: # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1137 + * return self.trace_dispatch + * else: + * if not breakpoint and is_line: # <<<<<<<<<<<<<< + * # No stop from anyone and no breakpoint found in line (cache that). + * frame_skips_cache[line_cache_key] = 0 + */ + /*else*/ { + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1137, __pyx_L104_error) + __pyx_t_11 = ((!__pyx_t_9) != 0); + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L167_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_is_line != 0); + __pyx_t_14 = __pyx_t_11; + __pyx_L167_bool_binop_done:; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1139 + * if not breakpoint and is_line: + * # No stop from anyone and no breakpoint found in line (cache that). + * frame_skips_cache[line_cache_key] = 0 # <<<<<<<<<<<<<< + * + * except: + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1139, __pyx_L104_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_line_cache_key, __pyx_int_0) < 0)) __PYX_ERR(0, 1139, __pyx_L104_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1137 + * return self.trace_dispatch + * else: + * if not breakpoint and is_line: # <<<<<<<<<<<<<< + * # No stop from anyone and no breakpoint found in line (cache that). 
+ * frame_skips_cache[line_cache_key] = 0 + */ + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1037 + * # if DEBUG: print('NOT skipped: %s %s %s %s' % (frame.f_lineno, frame.f_code.co_name, event, frame.__class__.__name__)) + * + * try: # <<<<<<<<<<<<<< + * flag = False + * # return is not taken into account for breakpoint hit because we'd have a double-hit in this case + */ + } + __Pyx_XDECREF(__pyx_t_18); __pyx_t_18 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + goto __pyx_L109_try_end; + __pyx_L104_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1141 + * frame_skips_cache[line_cache_key] = 0 + * + * except: # <<<<<<<<<<<<<< + * pydev_log.exception() + * raise + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_8, &__pyx_t_6) < 0) __PYX_ERR(0, 1141, __pyx_L106_except_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_6); + + /* "_pydevd_bundle/pydevd_cython.pyx":1142 + * + * except: + * pydev_log.exception() # <<<<<<<<<<<<<< + * raise + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1142, __pyx_L106_except_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_exception); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1142, __pyx_L106_except_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_7 = (__pyx_t_1) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_1) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1142, __pyx_L106_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1143 + * except: + * pydev_log.exception() + * raise # <<<<<<<<<<<<<< + * + * # step handling. 
We stop when we hit the right frame + */ + __Pyx_GIVEREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_ErrRestoreWithState(__pyx_t_4, __pyx_t_8, __pyx_t_6); + __pyx_t_4 = 0; __pyx_t_8 = 0; __pyx_t_6 = 0; + __PYX_ERR(0, 1143, __pyx_L106_except_error) + } + __pyx_L106_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1037 + * # if DEBUG: print('NOT skipped: %s %s %s %s' % (frame.f_lineno, frame.f_code.co_name, event, frame.__class__.__name__)) + * + * try: # <<<<<<<<<<<<<< + * flag = False + * # return is not taken into account for breakpoint hit because we'd have a double-hit in this case + */ + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_18, __pyx_t_17, __pyx_t_16); + goto __pyx_L4_error; + __pyx_L108_try_return:; + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_18, __pyx_t_17, __pyx_t_16); + goto __pyx_L3_return; + __pyx_L109_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1146 + * + * # step handling. We stop when we hit the right frame + * try: # <<<<<<<<<<<<<< + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_18); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1147 + * # step handling. We stop when we hit the right frame + * try: + * should_skip = 0 # <<<<<<<<<<<<<< + * if pydevd_dont_trace.should_trace_hook is not None: + * if self.should_skip == -1: + */ + __pyx_v_should_skip = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1148 + * try: + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if self.should_skip == -1: + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1148, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1148, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_14 = (__pyx_t_8 != Py_None); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = (__pyx_t_14 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1149 + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + * if self.should_skip == -1: # <<<<<<<<<<<<<< + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + */ + __pyx_t_11 = ((__pyx_v_self->should_skip == -1L) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1153 + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + * # Which will be handled by this frame is read-only, so, we can cache it safely. 
+ * if not pydevd_dont_trace.should_trace_hook(frame, abs_path_canonical_path_and_base[0]): # <<<<<<<<<<<<<< + * # -1, 0, 1 to be Cython-friendly + * should_skip = self.should_skip = 1 + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v_abs_path_canonical_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1153, __pyx_L171_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_path_canonical_path_and_base, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_frame, __pyx_t_6}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_frame, __pyx_t_6}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_10, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_10, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1153, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_14 = ((!__pyx_t_11) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1155 + * if not pydevd_dont_trace.should_trace_hook(frame, abs_path_canonical_path_and_base[0]): + * # -1, 0, 1 to be Cython-friendly + * should_skip = self.should_skip = 1 # <<<<<<<<<<<<<< + * else: + * should_skip = self.should_skip = 0 + */ + __pyx_v_should_skip = 1; + __pyx_v_self->should_skip = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1153 + * # Note that on a code reload, we won't re-evaluate this because in 
practice, the frame.f_code + * # Which will be handled by this frame is read-only, so, we can cache it safely. + * if not pydevd_dont_trace.should_trace_hook(frame, abs_path_canonical_path_and_base[0]): # <<<<<<<<<<<<<< + * # -1, 0, 1 to be Cython-friendly + * should_skip = self.should_skip = 1 + */ + goto __pyx_L179; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1157 + * should_skip = self.should_skip = 1 + * else: + * should_skip = self.should_skip = 0 # <<<<<<<<<<<<<< + * else: + * should_skip = self.should_skip + */ + /*else*/ { + __pyx_v_should_skip = 0; + __pyx_v_self->should_skip = 0; + } + __pyx_L179:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1149 + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + * if self.should_skip == -1: # <<<<<<<<<<<<<< + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + */ + goto __pyx_L178; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1159 + * should_skip = self.should_skip = 0 + * else: + * should_skip = self.should_skip # <<<<<<<<<<<<<< + * + * plugin_stop = False + */ + /*else*/ { + __pyx_t_10 = __pyx_v_self->should_skip; + __pyx_v_should_skip = __pyx_t_10; + } + __pyx_L178:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1148 + * try: + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if self.should_skip == -1: + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1161 + * should_skip = self.should_skip + * + * plugin_stop = False # <<<<<<<<<<<<<< + * if should_skip: + * stop = False + */ + __Pyx_INCREF(Py_False); + __pyx_v_plugin_stop = Py_False; + + /* "_pydevd_bundle/pydevd_cython.pyx":1162 + * + * plugin_stop = False + * if should_skip: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_14 = (__pyx_v_should_skip != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1163 + * plugin_stop = False + * if should_skip: + * stop = False # <<<<<<<<<<<<<< + * + * elif step_cmd in (107, 144, 206): + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1162 + * + * plugin_stop = False + * if should_skip: # <<<<<<<<<<<<<< + * stop = False + * + */ + goto __pyx_L180; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1165 + * stop = False + * + * elif step_cmd in (107, 144, 206): # <<<<<<<<<<<<<< + * force_check_project_scope = step_cmd == 144 + * if is_line: + */ + switch (__pyx_v_step_cmd) { + case 0x6B: + case 0x90: + case 0xCE: + __pyx_t_14 = 1; + break; + default: + __pyx_t_14 = 0; + break; + } + __pyx_t_11 = (__pyx_t_14 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1166 + * + * elif step_cmd in (107, 144, 206): + * force_check_project_scope = step_cmd == 144 # <<<<<<<<<<<<<< + * if is_line: + * if not info.pydev_use_scoped_step_frame: + */ + __pyx_t_8 = __Pyx_PyBool_FromLong((__pyx_v_step_cmd == 0x90)); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1166, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_v_force_check_project_scope = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1167 + * elif step_cmd in (107, 144, 206): + * force_check_project_scope = step_cmd == 144 + * if is_line: # <<<<<<<<<<<<<< + * if not info.pydev_use_scoped_step_frame: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + */ + __pyx_t_11 = (__pyx_v_is_line != 0); + if (__pyx_t_11) 
{ + + /* "_pydevd_bundle/pydevd_cython.pyx":1168 + * force_check_project_scope = step_cmd == 144 + * if is_line: + * if not info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + * stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + */ + __pyx_t_11 = ((!(__pyx_v_info->pydev_use_scoped_step_frame != 0)) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1169 + * if is_line: + * if not info.pydev_use_scoped_step_frame: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: # <<<<<<<<<<<<<< + * stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + * else: + */ + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_force_check_project_scope); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1169, __pyx_L171_error) + if (!__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L184_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_is_files_filter_enabled); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1169, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1169, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __pyx_t_14; + __pyx_L184_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1170 + * if not info.pydev_use_scoped_step_frame: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + * stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) # <<<<<<<<<<<<<< + * else: + * stop = True + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_frame, __pyx_t_6, __pyx_v_force_check_project_scope}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_frame, __pyx_t_6, __pyx_v_force_check_project_scope}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_6); 
__pyx_t_6 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_10, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_10, __pyx_t_6); + __Pyx_INCREF(__pyx_v_force_check_project_scope); + __Pyx_GIVEREF(__pyx_v_force_check_project_scope); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_10, __pyx_v_force_check_project_scope); + __pyx_t_6 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1170, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_stop = (!__pyx_t_11); + + /* "_pydevd_bundle/pydevd_cython.pyx":1169 + * if is_line: + * if not info.pydev_use_scoped_step_frame: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: # <<<<<<<<<<<<<< + * stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + * else: + */ + goto __pyx_L183; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1172 + * stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + * else: + * stop = True # <<<<<<<<<<<<<< + * else: + * # We can only stop inside the ipython call. + */ + /*else*/ { + __pyx_v_stop = 1; + } + __pyx_L183:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1168 + * force_check_project_scope = step_cmd == 144 + * if is_line: + * if not info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + * stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + */ + goto __pyx_L182; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1175 + * else: + * # We can only stop inside the ipython call. + * filename = frame.f_code.co_filename # <<<<<<<<<<<<<< + * if filename.endswith('.pyc'): + * filename = filename[:-1] + */ + /*else*/ { + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1175, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1175, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_filename = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1176 + * # We can only stop inside the ipython call. 
+ * filename = frame.f_code.co_filename + * if filename.endswith('.pyc'): # <<<<<<<<<<<<<< + * filename = filename[:-1] + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_filename, __pyx_n_s_endswith); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1176, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_7, __pyx_kp_s_pyc) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_kp_s_pyc); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1176, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1176, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1177 + * filename = frame.f_code.co_filename + * if filename.endswith('.pyc'): + * filename = filename[:-1] # <<<<<<<<<<<<<< + * + * if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): + */ + __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_filename, 0, -1L, NULL, NULL, &__pyx_slice__4, 0, 1, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1177, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_filename, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1176 + * # We can only stop inside the ipython call. + * filename = frame.f_code.co_filename + * if filename.endswith('.pyc'): # <<<<<<<<<<<<<< + * filename = filename[:-1] + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1179 + * filename = filename[:-1] + * + * if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): # <<<<<<<<<<<<<< + * f = frame.f_back + * while f is not None: + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_filename, __pyx_n_s_endswith); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1179, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1179, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_GetItemInt(__pyx_t_7, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1179, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_7, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1179, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1179, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_14 = ((!__pyx_t_11) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1180 + * + * if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): + * f = frame.f_back # <<<<<<<<<<<<<< + * while f is not None: + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1180, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_f, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1181 + * if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): + * f = frame.f_back + * while f is not None: # <<<<<<<<<<<<<< + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f2 = f.f_back + */ + while (1) { + __pyx_t_14 = (__pyx_v_f != Py_None); + __pyx_t_11 = (__pyx_t_14 != 0); + if (!__pyx_t_11) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1182 + * f = frame.f_back + * while f is not None: + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: # <<<<<<<<<<<<<< + * f2 = f.f_back + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1182, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_name); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1182, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1182, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_GetItemInt(__pyx_t_4, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1182, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_RichCompare(__pyx_t_8, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1182, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1182, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1183 + * while f is not None: + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f2 = f.f_back # <<<<<<<<<<<<<< + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + * pydev_log.debug('Stop inside ipython call') + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1183, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_f2, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1184 + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f2 = f.f_back + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: # <<<<<<<<<<<<<< + * pydev_log.debug('Stop inside ipython call') + * stop = 
True + */ + __pyx_t_14 = (__pyx_v_f2 != Py_None); + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_11 = __pyx_t_9; + goto __pyx_L192_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f2, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1184, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1184, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1184, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_4, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1184, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_RichCompare(__pyx_t_6, __pyx_t_8, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1184, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1184, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_11 = __pyx_t_9; + __pyx_L192_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1185 + * f2 = f.f_back + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + * pydev_log.debug('Stop inside ipython call') # <<<<<<<<<<<<<< + * stop = True + * break + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1185, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_debug); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1185, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_4 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_8, __pyx_kp_s_Stop_inside_ipython_call) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_kp_s_Stop_inside_ipython_call); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1185, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1186 + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + * pydev_log.debug('Stop inside ipython call') + * stop = True # <<<<<<<<<<<<<< + * break + * f = f.f_back + */ + __pyx_v_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1187 + * pydev_log.debug('Stop inside ipython call') + * stop = True + * break # <<<<<<<<<<<<<< + * f = f.f_back + * + */ + goto __pyx_L189_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1184 + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + * f2 = f.f_back + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: # <<<<<<<<<<<<<< + * pydev_log.debug('Stop inside ipython call') + * stop = True + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1182 + * f = frame.f_back + * while f is not None: + * if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: # <<<<<<<<<<<<<< + * f2 = f.f_back + * if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1188 + * stop = True + * break + * f = f.f_back # <<<<<<<<<<<<<< + * + * del f + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1188, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_4); + __pyx_t_4 = 0; + } + __pyx_L189_break:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1190 + * f = f.f_back + * + * del f # <<<<<<<<<<<<<< + * + * if not stop: + */ + __Pyx_DECREF(__pyx_v_f); + __pyx_v_f = NULL; + + /* "_pydevd_bundle/pydevd_cython.pyx":1179 + * filename = filename[:-1] + * + * if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): # <<<<<<<<<<<<<< + * f = frame.f_back + * while f is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1192 + * del f + * + * if not stop: # <<<<<<<<<<<<<< + * # In scoped mode if step in didn't work in this context it won't work + * # afterwards anyways. + */ + __pyx_t_11 = ((!(__pyx_v_stop != 0)) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1195 + * # In scoped mode if step in didn't work in this context it won't work + * # afterwards anyways. + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_4 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1195, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L175_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1192 + * del f + * + * if not stop: # <<<<<<<<<<<<<< + * # In scoped mode if step in didn't work in this context it won't work + * # afterwards anyways. 
+ */ + } + } + __pyx_L182:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1167 + * elif step_cmd in (107, 144, 206): + * force_check_project_scope = step_cmd == 144 + * if is_line: # <<<<<<<<<<<<<< + * if not info.pydev_use_scoped_step_frame: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + */ + goto __pyx_L181; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1197 + * return None if is_call else NO_FTRACE + * + * elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + * stop = False + */ + __pyx_t_9 = (__pyx_v_is_return != 0); + if (__pyx_t_9) { + } else { + __pyx_t_11 = __pyx_t_9; + goto __pyx_L195_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1197, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_9 = (__pyx_t_4 != Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L195_bool_binop_done; + } + __pyx_t_14 = ((!(__pyx_v_info->pydev_use_scoped_step_frame != 0)) != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L195_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1198 + * + * elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: # <<<<<<<<<<<<<< + * stop = False + * else: + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_get_file_type); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1198, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1198, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_8) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1198, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_PYDEV_FILE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1198, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = PyObject_RichCompare(__pyx_t_4, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1198, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1198, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1199 + * elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + * stop = False # <<<<<<<<<<<<<< + * else: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1198 + * + * elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: # <<<<<<<<<<<<<< + * stop = False + * else: + */ + goto __pyx_L198; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1201 + * stop = False + * else: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: # <<<<<<<<<<<<<< + * stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) + * if stop: + */ + /*else*/ { + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_force_check_project_scope); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1201, __pyx_L171_error) + if (!__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L200_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_is_files_filter_enabled); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1201, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1201, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __pyx_t_14; + __pyx_L200_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1202 + * else: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + * stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) # <<<<<<<<<<<<<< + * if stop: + * # Prevent stopping in a return to the same location we were initially + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1202, 
__pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_t_4, __pyx_t_7, __pyx_v_force_check_project_scope}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_t_4, __pyx_t_7, __pyx_v_force_check_project_scope}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_1 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_10, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_10, __pyx_t_7); + __Pyx_INCREF(__pyx_v_force_check_project_scope); + __Pyx_GIVEREF(__pyx_v_force_check_project_scope); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_10, __pyx_v_force_check_project_scope); + __pyx_t_4 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_1, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1202, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_stop = (!__pyx_t_11); + + /* "_pydevd_bundle/pydevd_cython.pyx":1203 + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + * stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) + * if stop: # <<<<<<<<<<<<<< + * # Prevent stopping in a return to the same location we were initially + * # (i.e.: double-stop at the same place due to some filtering). + */ + __pyx_t_11 = (__pyx_v_stop != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1206 + * # Prevent stopping in a return to the same location we were initially + * # (i.e.: double-stop at the same place due to some filtering). 
+ * if info.step_in_initial_location == (frame.f_back, frame.f_back.f_lineno): # <<<<<<<<<<<<<< + * stop = False + * else: + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1206, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1206, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1206, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1206, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); + __pyx_t_8 = 0; + __pyx_t_1 = 0; + __pyx_t_1 = PyObject_RichCompare(__pyx_v_info->step_in_initial_location, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1206, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1206, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1207 + * # (i.e.: double-stop at the same place due to some filtering). + * if info.step_in_initial_location == (frame.f_back, frame.f_back.f_lineno): + * stop = False # <<<<<<<<<<<<<< + * else: + * stop = True + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1206 + * # Prevent stopping in a return to the same location we were initially + * # (i.e.: double-stop at the same place due to some filtering). + * if info.step_in_initial_location == (frame.f_back, frame.f_back.f_lineno): # <<<<<<<<<<<<<< + * stop = False + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1203 + * if force_check_project_scope or main_debugger.is_files_filter_enabled: + * stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) + * if stop: # <<<<<<<<<<<<<< + * # Prevent stopping in a return to the same location we were initially + * # (i.e.: double-stop at the same place due to some filtering). 
+ */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1201 + * stop = False + * else: + * if force_check_project_scope or main_debugger.is_files_filter_enabled: # <<<<<<<<<<<<<< + * stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) + * if stop: + */ + goto __pyx_L199; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1209 + * stop = False + * else: + * stop = True # <<<<<<<<<<<<<< + * else: + * stop = False + */ + /*else*/ { + __pyx_v_stop = 1; + } + __pyx_L199:; + } + __pyx_L198:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1197 + * return None if is_call else NO_FTRACE + * + * elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: # <<<<<<<<<<<<<< + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + * stop = False + */ + goto __pyx_L181; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1211 + * stop = True + * else: + * stop = False # <<<<<<<<<<<<<< + * + * if stop: + */ + /*else*/ { + __pyx_v_stop = 0; + } + __pyx_L181:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1213 + * stop = False + * + * if stop: # <<<<<<<<<<<<<< + * if step_cmd == 206: + * # i.e.: Check if we're stepping into the proper context. + */ + __pyx_t_11 = (__pyx_v_stop != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1214 + * + * if stop: + * if step_cmd == 206: # <<<<<<<<<<<<<< + * # i.e.: Check if we're stepping into the proper context. + * f = frame + */ + __pyx_t_11 = ((__pyx_v_step_cmd == 0xCE) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1216 + * if step_cmd == 206: + * # i.e.: Check if we're stepping into the proper context. + * f = frame # <<<<<<<<<<<<<< + * while f is not None: + * if self._is_same_frame(stop_frame, f): + */ + __Pyx_INCREF(__pyx_v_frame); + __Pyx_XDECREF_SET(__pyx_v_f, __pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":1217 + * # i.e.: Check if we're stepping into the proper context. 
+ * f = frame + * while f is not None: # <<<<<<<<<<<<<< + * if self._is_same_frame(stop_frame, f): + * break + */ + while (1) { + __pyx_t_11 = (__pyx_v_f != Py_None); + __pyx_t_14 = (__pyx_t_11 != 0); + if (!__pyx_t_14) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1218 + * f = frame + * while f is not None: + * if self._is_same_frame(stop_frame, f): # <<<<<<<<<<<<<< + * break + * f = f.f_back + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_f); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1218, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1218, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1219 + * while f is not None: + * if self._is_same_frame(stop_frame, f): + * break # <<<<<<<<<<<<<< + * f = f.f_back + * else: + */ + goto __pyx_L207_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1218 + * f = frame + * while f is not None: + * if self._is_same_frame(stop_frame, f): # <<<<<<<<<<<<<< + * break + * f = f.f_back + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1220 + * if self._is_same_frame(stop_frame, f): + * break + * f = f.f_back # <<<<<<<<<<<<<< + * else: + * stop = False + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1220, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_1); + __pyx_t_1 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1222 + * f = f.f_back + * else: + * stop = False # <<<<<<<<<<<<<< + * + * if plugin_manager is not None: + */ + /*else*/ { + __pyx_v_stop = 0; + } + __pyx_L207_break:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1214 + * + * if stop: + * if step_cmd == 206: # <<<<<<<<<<<<<< + * # i.e.: Check if we're stepping into the proper context. + * f = frame + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1213 + * stop = False + * + * if stop: # <<<<<<<<<<<<<< + * if step_cmd == 206: + * # i.e.: Check if we're stepping into the proper context. 
+ */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1224 + * stop = False + * + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + __pyx_t_14 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_11 = (__pyx_t_14 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1225 + * + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) # <<<<<<<<<<<<<< + * if result: + * stop, plugin_stop = result + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_cmd_step_into); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1225, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = __Pyx_PyBool_FromLong(__pyx_v_stop); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1225, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[7] = {__pyx_t_7, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 6+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1225, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[7] = {__pyx_t_7, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 6+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1225, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + { + __pyx_t_4 = PyTuple_New(6+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1225, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_10, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_10, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_10, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_4, 3+__pyx_t_10, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_v_stop_info); + PyTuple_SET_ITEM(__pyx_t_4, 4+__pyx_t_10, __pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_4, 5+__pyx_t_10, __pyx_t_8); + __pyx_t_8 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1225, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + 
__Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1226 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1226, __pyx_L171_error) + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1227 + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + * stop, plugin_stop = result # <<<<<<<<<<<<<< + * + * elif step_cmd in (108, 159): + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1227, __pyx_L171_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_6 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1227, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1227, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_4 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1227, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_15 = Py_TYPE(__pyx_t_4)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_1)) goto __pyx_L211_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_6 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_6)) goto __pyx_L211_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_4), 2) < 0) __PYX_ERR(0, 1227, __pyx_L171_error) + __pyx_t_15 = NULL; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L212_unpacking_done; + __pyx_L211_unpacking_failed:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_15 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 1227, __pyx_L171_error) + __pyx_L212_unpacking_done:; + } + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_11 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1227, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_stop = __pyx_t_11; + __Pyx_DECREF_SET(__pyx_v_plugin_stop, __pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1226 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1224 + * stop = False + * + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1165 + * stop = False + * + * elif step_cmd in (107, 144, 206): # 
<<<<<<<<<<<<<< + * force_check_project_scope = step_cmd == 144 + * if is_line: + */ + goto __pyx_L180; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1229 + * stop, plugin_stop = result + * + * elif step_cmd in (108, 159): # <<<<<<<<<<<<<< + * # Note: when dealing with a step over my code it's the same as a step over (the + * # difference is that when we return from a frame in one we go to regular step + */ + switch (__pyx_v_step_cmd) { + case 0x6C: + case 0x9F: + __pyx_t_11 = 1; + break; + default: + __pyx_t_11 = 0; + break; + } + __pyx_t_14 = (__pyx_t_11 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1233 + * # difference is that when we return from a frame in one we go to regular step + * # into and in the other we go to a step into my code). + * stop = self._is_same_frame(stop_frame, frame) and is_line # <<<<<<<<<<<<<< + * # Note: don't stop on a return for step over, only for line events + * # i.e.: don't stop in: (stop_frame is frame.f_back and is_return) as we'd stop twice in that line. + */ + __pyx_t_6 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_frame); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1233, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1233, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_11) { + } else { + __pyx_t_14 = __pyx_t_11; + goto __pyx_L213_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_is_line != 0); + __pyx_t_14 = __pyx_t_11; + __pyx_L213_bool_binop_done:; + __pyx_v_stop = __pyx_t_14; + + /* "_pydevd_bundle/pydevd_cython.pyx":1237 + * # i.e.: don't stop in: (stop_frame is frame.f_back and is_return) as we'd stop twice in that line. 
+ * + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + __pyx_t_14 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_11 = (__pyx_t_14 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1238 + * + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) # <<<<<<<<<<<<<< + * if result: + * stop, plugin_stop = result + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_cmd_step_over); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1238, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_stop); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1238, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[7] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_t_4}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_10, 6+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1238, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[7] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_t_4}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_10, 6+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1238, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(6+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1238, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_10, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_10, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_10, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_10, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_v_stop_info); + PyTuple_SET_ITEM(__pyx_t_7, 4+__pyx_t_10, __pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_7, 5+__pyx_t_10, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1238, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_6); + __pyx_t_6 = 0; + + /* 
"_pydevd_bundle/pydevd_cython.pyx":1239 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1239, __pyx_L171_error) + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1240 + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + * stop, plugin_stop = result # <<<<<<<<<<<<<< + * + * elif step_cmd == 128: + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1240, __pyx_L171_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_6 = PyList_GET_ITEM(sequence, 0); + __pyx_t_1 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_1); + #else + __pyx_t_6 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1240, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1240, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_7 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1240, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_15 = Py_TYPE(__pyx_t_7)->tp_iternext; + index = 0; __pyx_t_6 = __pyx_t_15(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L217_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + index = 1; __pyx_t_1 = __pyx_t_15(__pyx_t_7); if (unlikely(!__pyx_t_1)) goto __pyx_L217_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_7), 2) < 0) __PYX_ERR(0, 1240, __pyx_L171_error) + __pyx_t_15 = NULL; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L218_unpacking_done; + __pyx_L217_unpacking_failed:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_15 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 1240, __pyx_L171_error) + __pyx_L218_unpacking_done:; + } + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely((__pyx_t_11 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1240, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_stop = __pyx_t_11; + __Pyx_DECREF_SET(__pyx_v_plugin_stop, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1239 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1237 + * # i.e.: don't stop in: (stop_frame is frame.f_back and is_return) as we'd stop twice in that line. 
+ * + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1229 + * stop, plugin_stop = result + * + * elif step_cmd in (108, 159): # <<<<<<<<<<<<<< + * # Note: when dealing with a step over my code it's the same as a step over (the + * # difference is that when we return from a frame in one we go to regular step + */ + goto __pyx_L180; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1242 + * stop, plugin_stop = result + * + * elif step_cmd == 128: # <<<<<<<<<<<<<< + * stop = False + * back = frame.f_back + */ + __pyx_t_11 = ((__pyx_v_step_cmd == 0x80) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1243 + * + * elif step_cmd == 128: + * stop = False # <<<<<<<<<<<<<< + * back = frame.f_back + * if self._is_same_frame(stop_frame, frame) and is_return: + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1244 + * elif step_cmd == 128: + * stop = False + * back = frame.f_back # <<<<<<<<<<<<<< + * if self._is_same_frame(stop_frame, frame) and is_return: + * # We're exiting the smart step into initial frame (so, we probably didn't find our target). + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1244, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_back = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1245 + * stop = False + * back = frame.f_back + * if self._is_same_frame(stop_frame, frame) and is_return: # <<<<<<<<<<<<<< + * # We're exiting the smart step into initial frame (so, we probably didn't find our target). + * stop = True + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1245, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1245, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L220_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_is_return != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L220_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1247 + * if self._is_same_frame(stop_frame, frame) and is_return: + * # We're exiting the smart step into initial frame (so, we probably didn't find our target). + * stop = True # <<<<<<<<<<<<<< + * + * elif self._is_same_frame(stop_frame, back) and is_line: + */ + __pyx_v_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1245 + * stop = False + * back = frame.f_back + * if self._is_same_frame(stop_frame, frame) and is_return: # <<<<<<<<<<<<<< + * # We're exiting the smart step into initial frame (so, we probably didn't find our target). 
+ * stop = True + */ + goto __pyx_L219; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1249 + * stop = True + * + * elif self._is_same_frame(stop_frame, back) and is_line: # <<<<<<<<<<<<<< + * if info.pydev_smart_child_offset != -1: + * # i.e.: in this case, we're not interested in the pause in the parent, rather + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1249, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1249, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L222_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_is_line != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L222_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1250 + * + * elif self._is_same_frame(stop_frame, back) and is_line: + * if info.pydev_smart_child_offset != -1: # <<<<<<<<<<<<<< + * # i.e.: in this case, we're not interested in the pause in the parent, rather + * # we're interested in the pause in the child (when the parent is at the proper place). + */ + __pyx_t_11 = ((__pyx_v_info->pydev_smart_child_offset != -1L) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1253 + * # i.e.: in this case, we're not interested in the pause in the parent, rather + * # we're interested in the pause in the child (when the parent is at the proper place). + * stop = False # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1250 + * + * elif self._is_same_frame(stop_frame, back) and is_line: + * if info.pydev_smart_child_offset != -1: # <<<<<<<<<<<<<< + * # i.e.: in this case, we're not interested in the pause in the parent, rather + * # we're interested in the pause in the child (when the parent is at the proper place). + */ + goto __pyx_L224; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1256 + * + * else: + * pydev_smart_parent_offset = info.pydev_smart_parent_offset # <<<<<<<<<<<<<< + * + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + */ + /*else*/ { + __pyx_t_10 = __pyx_v_info->pydev_smart_parent_offset; + __pyx_v_pydev_smart_parent_offset = __pyx_t_10; + + /* "_pydevd_bundle/pydevd_cython.pyx":1258 + * pydev_smart_parent_offset = info.pydev_smart_parent_offset + * + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants # <<<<<<<<<<<<<< + * if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: + * # Preferred mode (when the smart step into variants are available + */ + __pyx_t_1 = __pyx_v_info->pydev_smart_step_into_variants; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pydev_smart_step_into_variants = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1259 + * + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + * if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: # <<<<<<<<<<<<<< + * # Preferred mode (when the smart step into variants are available + * # and the offset is set). 
+ */ + __pyx_t_14 = ((__pyx_v_pydev_smart_parent_offset >= 0) != 0); + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L226_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_pydev_smart_step_into_variants != Py_None)&&(PyTuple_GET_SIZE(__pyx_v_pydev_smart_step_into_variants) != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L226_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1262 + * # Preferred mode (when the smart step into variants are available + * # and the offset is set). + * stop = get_smart_step_into_variant_from_frame_offset(back.f_lasti, pydev_smart_step_into_variants) is \ # <<<<<<<<<<<<<< + * get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_get_smart_step_into_variant_from); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1262, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_lasti); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1262, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_pydev_smart_step_into_variants}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1262, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_pydev_smart_step_into_variants}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1262, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1262, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_10, __pyx_t_7); + __Pyx_INCREF(__pyx_v_pydev_smart_step_into_variants); + __Pyx_GIVEREF(__pyx_v_pydev_smart_step_into_variants); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_10, __pyx_v_pydev_smart_step_into_variants); + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1262, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1263 + * # and the offset is set). 
+ * stop = get_smart_step_into_variant_from_frame_offset(back.f_lasti, pydev_smart_step_into_variants) is \ + * get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_get_smart_step_into_variant_from); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1263, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_pydev_smart_parent_offset); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1263, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_pydev_smart_step_into_variants}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1263, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_pydev_smart_step_into_variants}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1263, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1263, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_10, __pyx_t_7); + __Pyx_INCREF(__pyx_v_pydev_smart_step_into_variants); + __Pyx_GIVEREF(__pyx_v_pydev_smart_step_into_variants); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_10, __pyx_v_pydev_smart_step_into_variants); + __pyx_t_7 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_3, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1263, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = (__pyx_t_1 == __pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_stop = __pyx_t_11; + + /* "_pydevd_bundle/pydevd_cython.pyx":1259 + * + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + * if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: # <<<<<<<<<<<<<< + * # Preferred mode (when the smart step into variants are available + * # and the offset is set). + */ + goto __pyx_L225; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1267 + * else: + * # Only the name/line is available, so, check that. 
+ * curr_func_name = frame.f_code.co_name # <<<<<<<<<<<<<< + * + * # global context is set with an empty name + */ + /*else*/ { + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1267, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_co_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1267, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 1267, __pyx_L171_error) + __Pyx_XDECREF_SET(__pyx_v_curr_func_name, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1270 + * + * # global context is set with an empty name + * if curr_func_name in ('?', '') or curr_func_name is None: # <<<<<<<<<<<<<< + * curr_func_name = '' + * if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: + */ + __Pyx_INCREF(__pyx_v_curr_func_name); + __pyx_t_21 = __pyx_v_curr_func_name; + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_t_21, __pyx_kp_s__3, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1270, __pyx_L171_error) + __pyx_t_29 = (__pyx_t_9 != 0); + if (!__pyx_t_29) { + } else { + __pyx_t_14 = __pyx_t_29; + goto __pyx_L231_bool_binop_done; + } + __pyx_t_29 = (__Pyx_PyString_Equals(__pyx_t_21, __pyx_kp_s_module, Py_EQ)); if (unlikely(__pyx_t_29 < 0)) __PYX_ERR(0, 1270, __pyx_L171_error) + __pyx_t_9 = (__pyx_t_29 != 0); + __pyx_t_14 = __pyx_t_9; + __pyx_L231_bool_binop_done:; + __Pyx_DECREF(__pyx_t_21); __pyx_t_21 = 0; + __pyx_t_9 = (__pyx_t_14 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_11 = __pyx_t_9; + goto __pyx_L229_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_curr_func_name == ((PyObject*)Py_None)); + __pyx_t_14 = (__pyx_t_9 != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L229_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1271 + * # global context is set with an empty name + * if curr_func_name in ('?', '') or curr_func_name is None: + * curr_func_name = '' # <<<<<<<<<<<<<< + * if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: + * stop = True + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_DECREF_SET(__pyx_v_curr_func_name, __pyx_kp_s_); + + /* "_pydevd_bundle/pydevd_cython.pyx":1270 + * + * # global context is set with an empty name + * if curr_func_name in ('?', '') or curr_func_name is None: # <<<<<<<<<<<<<< + * curr_func_name = '' + * if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1272 + * if curr_func_name in ('?', '') or curr_func_name is None: + * curr_func_name = '' + * if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: # <<<<<<<<<<<<<< + * stop = True + * + */ + __pyx_t_14 = (__Pyx_PyString_Equals(__pyx_v_curr_func_name, __pyx_v_info->pydev_func_name, Py_EQ)); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1272, __pyx_L171_error) + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_11 = __pyx_t_9; + goto __pyx_L234_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_stop_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1272, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_info->pydev_next_line); if 
(unlikely(!__pyx_t_6)) __PYX_ERR(0, 1272, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = PyObject_RichCompare(__pyx_t_1, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1272, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1272, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __pyx_t_9; + __pyx_L234_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1273 + * curr_func_name = '' + * if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: + * stop = True # <<<<<<<<<<<<<< + * + * if not stop: + */ + __pyx_v_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1272 + * if curr_func_name in ('?', '') or curr_func_name is None: + * curr_func_name = '' + * if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: # <<<<<<<<<<<<<< + * stop = True + * + */ + } + } + __pyx_L225:; + } + __pyx_L224:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1275 + * stop = True + * + * if not stop: # <<<<<<<<<<<<<< + * # In smart step into, if we didn't hit it in this frame once, that'll + * # not be the case next time either, so, disable tracing for this frame. + */ + __pyx_t_11 = ((!(__pyx_v_stop != 0)) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1278 + * # In smart step into, if we didn't hit it in this frame once, that'll + * # not be the case next time either, so, disable tracing for this frame. + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * elif back is not None and self._is_same_frame(stop_frame, back.f_back) and is_line: + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_8 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1278, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L175_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1275 + * stop = True + * + * if not stop: # <<<<<<<<<<<<<< + * # In smart step into, if we didn't hit it in this frame once, that'll + * # not be the case next time either, so, disable tracing for this frame. + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1249 + * stop = True + * + * elif self._is_same_frame(stop_frame, back) and is_line: # <<<<<<<<<<<<<< + * if info.pydev_smart_child_offset != -1: + * # i.e.: in this case, we're not interested in the pause in the parent, rather + */ + goto __pyx_L219; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1280 + * return None if is_call else NO_FTRACE + * + * elif back is not None and self._is_same_frame(stop_frame, back.f_back) and is_line: # <<<<<<<<<<<<<< + * # Ok, we have to track 2 stops at this point, the parent and the child offset. 
+ * # This happens when handling a step into which targets a function inside a list comprehension + */ + __pyx_t_9 = (__pyx_v_back != Py_None); + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L237_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_back); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1280, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_t_8); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1280, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1280, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L237_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_is_line != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L237_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1284 + * # This happens when handling a step into which targets a function inside a list comprehension + * # or generator (in which case an intermediary frame is created due to an internal function call). + * pydev_smart_parent_offset = info.pydev_smart_parent_offset # <<<<<<<<<<<<<< + * pydev_smart_child_offset = info.pydev_smart_child_offset + * # print('matched back frame', pydev_smart_parent_offset, pydev_smart_child_offset) + */ + __pyx_t_10 = __pyx_v_info->pydev_smart_parent_offset; + __pyx_v_pydev_smart_parent_offset = __pyx_t_10; + + /* "_pydevd_bundle/pydevd_cython.pyx":1285 + * # or generator (in which case an intermediary frame is created due to an internal function call). 
+ * pydev_smart_parent_offset = info.pydev_smart_parent_offset + * pydev_smart_child_offset = info.pydev_smart_child_offset # <<<<<<<<<<<<<< + * # print('matched back frame', pydev_smart_parent_offset, pydev_smart_child_offset) + * # print('parent f_lasti', back.f_back.f_lasti) + */ + __pyx_t_10 = __pyx_v_info->pydev_smart_child_offset; + __pyx_v_pydev_smart_child_offset = __pyx_t_10; + + /* "_pydevd_bundle/pydevd_cython.pyx":1289 + * # print('parent f_lasti', back.f_back.f_lasti) + * # print('child f_lasti', back.f_lasti) + * stop = False # <<<<<<<<<<<<<< + * if pydev_smart_child_offset >= 0 and pydev_smart_child_offset >= 0: + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1290 + * # print('child f_lasti', back.f_lasti) + * stop = False + * if pydev_smart_child_offset >= 0 and pydev_smart_child_offset >= 0: # <<<<<<<<<<<<<< + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + * + */ + __pyx_t_14 = ((__pyx_v_pydev_smart_child_offset >= 0) != 0); + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L241_bool_binop_done; + } + __pyx_t_14 = ((__pyx_v_pydev_smart_child_offset >= 0) != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L241_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1291 + * stop = False + * if pydev_smart_child_offset >= 0 and pydev_smart_child_offset >= 0: + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants # <<<<<<<<<<<<<< + * + * if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: + */ + __pyx_t_6 = __pyx_v_info->pydev_smart_step_into_variants; + __Pyx_INCREF(__pyx_t_6); + __pyx_v_pydev_smart_step_into_variants = ((PyObject*)__pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1293 + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + * + * if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: # <<<<<<<<<<<<<< + * # Note that we don't really check the parent offset, only the offset of + * # the child (because this is a generator, the parent may have moved forward + */ + __pyx_t_14 = ((__pyx_v_pydev_smart_parent_offset >= 0) != 0); + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L244_bool_binop_done; + } + __pyx_t_14 = (__pyx_v_pydev_smart_step_into_variants != Py_None)&&(PyTuple_GET_SIZE(__pyx_v_pydev_smart_step_into_variants) != 0); + __pyx_t_11 = __pyx_t_14; + __pyx_L244_bool_binop_done:; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1298 + * # already -- and that's ok, so, we just check that the parent frame + * # matches in this case). 
+ * smart_step_into_variant = get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) # <<<<<<<<<<<<<< + * # print('matched parent offset', pydev_smart_parent_offset) + * # Ok, now, check the child variant + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_get_smart_step_into_variant_from); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1298, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_pydev_smart_parent_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1298, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_t_1, __pyx_v_pydev_smart_step_into_variants}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1298, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_t_1, __pyx_v_pydev_smart_step_into_variants}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1298, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1298, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_10, __pyx_t_1); + __Pyx_INCREF(__pyx_v_pydev_smart_step_into_variants); + __Pyx_GIVEREF(__pyx_v_pydev_smart_step_into_variants); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_10, __pyx_v_pydev_smart_step_into_variants); + __pyx_t_1 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1298, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_smart_step_into_variant = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1301 + * # print('matched parent offset', pydev_smart_parent_offset) + * # Ok, now, check the child variant + * children_variants = smart_step_into_variant.children_variants # <<<<<<<<<<<<<< + * stop = children_variants and ( + * get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_smart_step_into_variant, __pyx_n_s_children_variants); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1301, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_v_children_variants = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1302 + * # Ok, now, check the child variant + * children_variants = smart_step_into_variant.children_variants + * stop = children_variants and ( # <<<<<<<<<<<<<< + * 
get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ + * get_smart_step_into_variant_from_frame_offset(pydev_smart_child_offset, children_variants) + */ + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_children_variants); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1302, __pyx_L171_error) + if (__pyx_t_14) { + } else { + __pyx_t_11 = __pyx_t_14; + goto __pyx_L246_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1303 + * children_variants = smart_step_into_variant.children_variants + * stop = children_variants and ( + * get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ # <<<<<<<<<<<<<< + * get_smart_step_into_variant_from_frame_offset(pydev_smart_child_offset, children_variants) + * ) + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_get_smart_step_into_variant_from); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1303, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_lasti); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1303, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_t_7, __pyx_v_children_variants}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1303, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_t_7, __pyx_v_children_variants}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1303, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1303, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_10, __pyx_t_7); + __Pyx_INCREF(__pyx_v_children_variants); + __Pyx_GIVEREF(__pyx_v_children_variants); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_10, __pyx_v_children_variants); + __pyx_t_7 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_3, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1303, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1304 + * stop = children_variants and ( + * get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ + * get_smart_step_into_variant_from_frame_offset(pydev_smart_child_offset, children_variants) # <<<<<<<<<<<<<< + * ) + * # print('stop at child', stop) + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_get_smart_step_into_variant_from); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1304, __pyx_L171_error) + 
__Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_pydev_smart_child_offset); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1304, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_t_7, __pyx_v_children_variants}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1304, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_t_7, __pyx_v_children_variants}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1304, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_4 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1304, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_10, __pyx_t_7); + __Pyx_INCREF(__pyx_v_children_variants); + __Pyx_GIVEREF(__pyx_v_children_variants); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_10, __pyx_v_children_variants); + __pyx_t_7 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1304, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_14 = (__pyx_t_6 == __pyx_t_8); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1303 + * children_variants = smart_step_into_variant.children_variants + * stop = children_variants and ( + * get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ # <<<<<<<<<<<<<< + * get_smart_step_into_variant_from_frame_offset(pydev_smart_child_offset, children_variants) + * ) + */ + __pyx_t_9 = (__pyx_t_14 != 0); + __pyx_t_11 = __pyx_t_9; + __pyx_L246_bool_binop_done:; + __pyx_v_stop = __pyx_t_11; + + /* "_pydevd_bundle/pydevd_cython.pyx":1293 + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + * + * if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: # <<<<<<<<<<<<<< + * # Note that we don't really check the parent offset, only the offset of + * # the child (because this is a generator, the parent may have moved forward + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1290 + * # print('child f_lasti', back.f_lasti) + * stop = False + * if pydev_smart_child_offset >= 0 and pydev_smart_child_offset >= 0: # <<<<<<<<<<<<<< + * pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1308 + * # print('stop at child', stop) + * + * if not stop: # <<<<<<<<<<<<<< + * # In smart 
step into, if we didn't hit it in this frame once, that'll + * # not be the case next time either, so, disable tracing for this frame. + */ + __pyx_t_11 = ((!(__pyx_v_stop != 0)) != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1311 + * # In smart step into, if we didn't hit it in this frame once, that'll + * # not be the case next time either, so, disable tracing for this frame. + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * elif step_cmd in (109, 160): + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_8 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1311, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L175_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1308 + * # print('stop at child', stop) + * + * if not stop: # <<<<<<<<<<<<<< + * # In smart step into, if we didn't hit it in this frame once, that'll + * # not be the case next time either, so, disable tracing for this frame. + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1280 + * return None if is_call else NO_FTRACE + * + * elif back is not None and self._is_same_frame(stop_frame, back.f_back) and is_line: # <<<<<<<<<<<<<< + * # Ok, we have to track 2 stops at this point, the parent and the child offset. + * # This happens when handling a step into which targets a function inside a list comprehension + */ + } + __pyx_L219:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1242 + * stop, plugin_stop = result + * + * elif step_cmd == 128: # <<<<<<<<<<<<<< + * stop = False + * back = frame.f_back + */ + goto __pyx_L180; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1313 + * return None if is_call else NO_FTRACE + * + * elif step_cmd in (109, 160): # <<<<<<<<<<<<<< + * stop = is_return and self._is_same_frame(stop_frame, frame) + * + */ + switch (__pyx_v_step_cmd) { + case 0x6D: + case 0xA0: + __pyx_t_11 = 1; + break; + default: + __pyx_t_11 = 0; + break; + } + __pyx_t_9 = (__pyx_t_11 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1314 + * + * elif step_cmd in (109, 160): + * stop = is_return and self._is_same_frame(stop_frame, frame) # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_t_11 = (__pyx_v_is_return != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L249_bool_binop_done; + } + __pyx_t_8 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self->__pyx_vtab)->_is_same_frame(__pyx_v_self, __pyx_v_stop_frame, __pyx_v_frame); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1314, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1314, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __pyx_t_11; + __pyx_L249_bool_binop_done:; + __pyx_v_stop = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":1313 + * return None if is_call else NO_FTRACE + * + * elif step_cmd in (109, 160): # <<<<<<<<<<<<<< + * stop = is_return and self._is_same_frame(stop_frame, frame) + * + */ + goto __pyx_L180; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1317 + * + * else: + * stop = False # <<<<<<<<<<<<<< + * + * if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): + */ + /*else*/ { + __pyx_v_stop = 0; + } + __pyx_L180:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1319 + * stop = False + * + * if stop and step_cmd != 
-1 and is_return and hasattr(frame, "f_back"): # <<<<<<<<<<<<<< + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + */ + __pyx_t_11 = (__pyx_v_stop != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L252_bool_binop_done; + } + __pyx_t_11 = ((__pyx_v_step_cmd != -1L) != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L252_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_is_return != 0); + if (__pyx_t_11) { + } else { + __pyx_t_9 = __pyx_t_11; + goto __pyx_L252_bool_binop_done; + } + __pyx_t_11 = __Pyx_HasAttr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(__pyx_t_11 == ((int)-1))) __PYX_ERR(0, 1319, __pyx_L171_error) + __pyx_t_14 = (__pyx_t_11 != 0); + __pyx_t_9 = __pyx_t_14; + __pyx_L252_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1320 + * + * if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): + * f_code = getattr(frame.f_back, 'f_code', None) # <<<<<<<<<<<<<< + * if f_code is not None: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1320, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = __Pyx_GetAttr3(__pyx_t_8, __pyx_n_s_f_code, Py_None); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1320, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_f_code = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1321 + * if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: # <<<<<<<<<<<<<< + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + * stop = False + */ + __pyx_t_9 = (__pyx_v_f_code != Py_None); + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1322 + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_get_file_type); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1322, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1322, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_6 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_4, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1322, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_PYDEV_FILE); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1322, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_6, __pyx_t_8, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1322, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1322, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1323 + * if f_code is not None: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + * stop = False # <<<<<<<<<<<<<< + * + * if plugin_stop: + */ + __pyx_v_stop = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1322 + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: # <<<<<<<<<<<<<< + * stop = False + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1321 + * if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: # <<<<<<<<<<<<<< + * if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + * stop = False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1319 + * stop = False + * + * if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): # <<<<<<<<<<<<<< + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1325 + * stop = False + * + * if plugin_stop: # <<<<<<<<<<<<<< + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + */ + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_v_plugin_stop); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1325, __pyx_L171_error) + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1326 + * + * if plugin_stop: + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) # <<<<<<<<<<<<<< + * elif stop: + * if is_line: + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_stop); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1326, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1326, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[8] = {__pyx_t_4, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_arg, __pyx_t_6}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 
7+__pyx_t_10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1326, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[8] = {__pyx_t_4, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_arg, __pyx_t_6}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 7+__pyx_t_10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1326, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(7+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1326, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_10, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_10, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_10, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_10, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_v_stop_info); + PyTuple_SET_ITEM(__pyx_t_7, 4+__pyx_t_10, __pyx_v_stop_info); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 5+__pyx_t_10, __pyx_v_arg); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_7, 6+__pyx_t_10, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1326, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_stopped_on_plugin = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1325 + * stop = False + * + * if plugin_stop: # <<<<<<<<<<<<<< + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + */ + goto __pyx_L258; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1327 + * if plugin_stop: + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: # <<<<<<<<<<<<<< + * if is_line: + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + */ + __pyx_t_14 = (__pyx_v_stop != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1328 + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + * if is_line: # <<<<<<<<<<<<<< + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) + */ + __pyx_t_14 = (__pyx_v_is_line != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1329 + * elif stop: + * if is_line: + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * elif is_return: # return event + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), 
__pyx_n_s_set_suspend); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_8); + __pyx_t_8 = 0; + __pyx_t_8 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_info->pydev_original_step_cmd); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + if (PyDict_SetItem(__pyx_t_8, __pyx_n_s_original_step_cmd, __pyx_t_6) < 0) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, __pyx_t_8); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1329, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1330 + * if is_line: + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) # <<<<<<<<<<<<<< + * elif is_return: # return event + * back = frame.f_back + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1330, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1330, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1330, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + { + __pyx_t_3 = PyTuple_New(4+__pyx_t_10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1330, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_10, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_10, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + 
__Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_10, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_10, __pyx_v_arg); + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_3, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1330, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1328 + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + * if is_line: # <<<<<<<<<<<<<< + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) + */ + goto __pyx_L259; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1331 + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) + * elif is_return: # return event # <<<<<<<<<<<<<< + * back = frame.f_back + * if back is not None: + */ + __pyx_t_14 = (__pyx_v_is_return != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1332 + * self.do_wait_suspend(thread, frame, event, arg) + * elif is_return: # return event + * back = frame.f_back # <<<<<<<<<<<<<< + * if back is not None: + * # When we get to the pydevd run function, the debugging has actually finished for the main thread + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1332, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_XDECREF_SET(__pyx_v_back, __pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1333 + * elif is_return: # return event + * back = frame.f_back + * if back is not None: # <<<<<<<<<<<<<< + * # When we get to the pydevd run function, the debugging has actually finished for the main thread + * # (note that it can still go on for other threads, but for this one, we just make it finish) + */ + __pyx_t_14 = (__pyx_v_back != Py_None); + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1337 + * # (note that it can still go on for other threads, but for this one, we just make it finish) + * # So, just setting it to None should be OK + * back_absolute_filename, _, base = get_abs_path_real_path_and_base_from_frame(back) # <<<<<<<<<<<<<< + * if (base, back.f_code.co_name) in (DEBUG_START, DEBUG_START_PY3K): + * back = None + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1337, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_6 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_3, __pyx_v_back) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_back); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1337, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_6))) || (PyList_CheckExact(__pyx_t_6))) { + PyObject* sequence = __pyx_t_6; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1337, __pyx_L171_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_8 = PyList_GET_ITEM(sequence, 0); + __pyx_t_3 = PyList_GET_ITEM(sequence, 1); + __pyx_t_7 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + #else + __pyx_t_8 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1337, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_3 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1337, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1337, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_4 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1337, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_15 = Py_TYPE(__pyx_t_4)->tp_iternext; + index = 0; __pyx_t_8 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_8)) goto __pyx_L261_unpacking_failed; + __Pyx_GOTREF(__pyx_t_8); + index = 1; __pyx_t_3 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_3)) goto __pyx_L261_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 2; __pyx_t_7 = __pyx_t_15(__pyx_t_4); if (unlikely(!__pyx_t_7)) goto __pyx_L261_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_4), 3) < 0) __PYX_ERR(0, 1337, __pyx_L171_error) + __pyx_t_15 = NULL; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L262_unpacking_done; + __pyx_L261_unpacking_failed:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_15 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 1337, __pyx_L171_error) + __pyx_L262_unpacking_done:; + } + __pyx_v_back_absolute_filename = __pyx_t_8; + __pyx_t_8 = 0; + __pyx_v__ = __pyx_t_3; + __pyx_t_3 = 0; + __pyx_v_base = __pyx_t_7; + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1338 + * # So, just setting it to None should be OK + * back_absolute_filename, _, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base, back.f_code.co_name) in (DEBUG_START, DEBUG_START_PY3K): # <<<<<<<<<<<<<< + * back = None + * + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_co_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) 
__PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_v_base); + __Pyx_GIVEREF(__pyx_v_base); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_base); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_7); + __pyx_t_7 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_DEBUG_START); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_6, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!__pyx_t_14) { + } else { + __pyx_t_9 = __pyx_t_14; + goto __pyx_L264_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_DEBUG_START_PY3K); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = PyObject_RichCompare(__pyx_t_6, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1338, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_9 = __pyx_t_14; + __pyx_L264_bool_binop_done:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_14 = (__pyx_t_9 != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1339 + * back_absolute_filename, _, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base, back.f_code.co_name) in (DEBUG_START, DEBUG_START_PY3K): + * back = None # <<<<<<<<<<<<<< + * + * elif base == TRACE_PROPERTY: + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_back, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":1338 + * # So, just setting it to None should be OK + * back_absolute_filename, _, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base, back.f_code.co_name) in (DEBUG_START, DEBUG_START_PY3K): # <<<<<<<<<<<<<< + * back = None + * + */ + goto __pyx_L263; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1341 + * back = None + * + * elif base == TRACE_PROPERTY: # <<<<<<<<<<<<<< + * # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + * # if we're in a return, we want it to appear to the user in the previous frame! + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_TRACE_PROPERTY); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1341, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyObject_RichCompare(__pyx_v_base, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1341, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_14 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_14 < 0)) __PYX_ERR(0, 1341, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1344 + * # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + * # if we're in a return, we want it to appear to the user in the previous frame! 
+ * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * elif pydevd_dont_trace.should_trace_hook is not None: + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_7 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1344, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L175_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1341 + * back = None + * + * elif base == TRACE_PROPERTY: # <<<<<<<<<<<<<< + * # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + * # if we're in a return, we want it to appear to the user in the previous frame! + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1346 + * return None if is_call else NO_FTRACE + * + * elif pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if not pydevd_dont_trace.should_trace_hook(back, back_absolute_filename): + * # In this case, we'll have to skip the previous one because it shouldn't be traced. + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1346, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1346, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_14 = (__pyx_t_6 != Py_None); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1347 + * + * elif pydevd_dont_trace.should_trace_hook is not None: + * if not pydevd_dont_trace.should_trace_hook(back, back_absolute_filename): # <<<<<<<<<<<<<< + * # In this case, we'll have to skip the previous one because it shouldn't be traced. 
+ * # Also, we have to reset the tracing, because if the parent's parent (or some + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_back, __pyx_v_back_absolute_filename}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_back, __pyx_v_back_absolute_filename}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_10, __pyx_v_back); + __Pyx_INCREF(__pyx_v_back_absolute_filename); + __Pyx_GIVEREF(__pyx_v_back_absolute_filename); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_10, __pyx_v_back_absolute_filename); + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1347, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_14 = ((!__pyx_t_9) != 0); + if (__pyx_t_14) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1353 + * # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced). + * # Related test: _debugger_case17a.py + * main_debugger.set_trace_for_frame_and_parents(back) # <<<<<<<<<<<<<< + * return None if is_call else NO_FTRACE + * + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_set_trace_for_frame_and_parents); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1353, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_6 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_8, __pyx_v_back) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_back); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1353, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1354 + * # Related test: _debugger_case17a.py + * main_debugger.set_trace_for_frame_and_parents(back) + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * if back is not None: + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_6 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1354, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L175_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1347 + * + * elif pydevd_dont_trace.should_trace_hook is not None: + * if not pydevd_dont_trace.should_trace_hook(back, back_absolute_filename): # <<<<<<<<<<<<<< + * # In this case, we'll have to skip the previous one because it shouldn't be traced. + * # Also, we have to reset the tracing, because if the parent's parent (or some + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1346 + * return None if is_call else NO_FTRACE + * + * elif pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if not pydevd_dont_trace.should_trace_hook(back, back_absolute_filename): + * # In this case, we'll have to skip the previous one because it shouldn't be traced. + */ + } + __pyx_L263:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1333 + * elif is_return: # return event + * back = frame.f_back + * if back is not None: # <<<<<<<<<<<<<< + * # When we get to the pydevd run function, the debugging has actually finished for the main thread + * # (note that it can still go on for other threads, but for this one, we just make it finish) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1356 + * return None if is_call else NO_FTRACE + * + * if back is not None: # <<<<<<<<<<<<<< + * # if we're in a return, we want it to appear to the user in the previous frame! + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + */ + __pyx_t_14 = (__pyx_v_back != Py_None); + __pyx_t_9 = (__pyx_t_14 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1358 + * if back is not None: + * # if we're in a return, we want it to appear to the user in the previous frame! 
+ * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, back, event, arg) + * else: + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_info->pydev_original_step_cmd); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_original_step_cmd, __pyx_t_7) < 0) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, __pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1358, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1359 + * # if we're in a return, we want it to appear to the user in the previous frame! + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + * self.do_wait_suspend(thread, back, event, arg) # <<<<<<<<<<<<<< + * else: + * # in jython we may not have a back frame + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1359, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_v_thread, __pyx_v_back, __pyx_v_event, __pyx_v_arg}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1359, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_v_thread, __pyx_v_back, __pyx_v_event, __pyx_v_arg}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 4+__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1359, __pyx_L171_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_6 = PyTuple_New(4+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1359, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + 
__Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_10, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_10, __pyx_v_back); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_10, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_6, 3+__pyx_t_10, __pyx_v_arg); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1359, __pyx_L171_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1356 + * return None if is_call else NO_FTRACE + * + * if back is not None: # <<<<<<<<<<<<<< + * # if we're in a return, we want it to appear to the user in the previous frame! + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + */ + goto __pyx_L267; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1362 + * else: + * # in jython we may not have a back frame + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * info.pydev_original_step_cmd = -1 + * info.pydev_step_cmd = -1 + */ + /*else*/ { + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1363 + * # in jython we may not have a back frame + * info.pydev_step_stop = None + * info.pydev_original_step_cmd = -1 # <<<<<<<<<<<<<< + * info.pydev_step_cmd = -1 + * info.pydev_state = 1 + */ + __pyx_v_info->pydev_original_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1364 + * info.pydev_step_stop = None + * info.pydev_original_step_cmd = -1 + * info.pydev_step_cmd = -1 # <<<<<<<<<<<<<< + * info.pydev_state = 1 + * + */ + __pyx_v_info->pydev_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1365 + * info.pydev_original_step_cmd = -1 + * info.pydev_step_cmd = -1 + * info.pydev_state = 1 # <<<<<<<<<<<<<< + * + * except KeyboardInterrupt: + */ + __pyx_v_info->pydev_state = 1; + } + __pyx_L267:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1331 + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) + * elif is_return: # return event # <<<<<<<<<<<<<< + * back = frame.f_back + * if back is not None: + */ + } + __pyx_L259:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1327 + * if plugin_stop: + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: # <<<<<<<<<<<<<< + * if is_line: + * self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + */ + } + __pyx_L258:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1146 + * + * # step handling. 
We stop when we hit the right frame + * try: # <<<<<<<<<<<<<< + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + */ + } + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_18); __pyx_t_18 = 0; + goto __pyx_L176_try_end; + __pyx_L171_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1367 + * info.pydev_state = 1 + * + * except KeyboardInterrupt: # <<<<<<<<<<<<<< + * raise + * except: + */ + __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyboardInterrupt); + if (__pyx_t_10) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_3, &__pyx_t_6) < 0) __PYX_ERR(0, 1367, __pyx_L173_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_6); + + /* "_pydevd_bundle/pydevd_cython.pyx":1368 + * + * except KeyboardInterrupt: + * raise # <<<<<<<<<<<<<< + * except: + * try: + */ + __Pyx_GIVEREF(__pyx_t_7); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_ErrRestoreWithState(__pyx_t_7, __pyx_t_3, __pyx_t_6); + __pyx_t_7 = 0; __pyx_t_3 = 0; __pyx_t_6 = 0; + __PYX_ERR(0, 1368, __pyx_L173_except_error) + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1369 + * except KeyboardInterrupt: + * raise + * except: # <<<<<<<<<<<<<< + * try: + * pydev_log.exception() + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_3, &__pyx_t_7) < 0) __PYX_ERR(0, 1369, __pyx_L173_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":1370 + * raise + * except: + * try: # <<<<<<<<<<<<<< + * pydev_log.exception() + * info.pydev_original_step_cmd = -1 + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_28, &__pyx_t_27, &__pyx_t_26); + __Pyx_XGOTREF(__pyx_t_28); + __Pyx_XGOTREF(__pyx_t_27); + __Pyx_XGOTREF(__pyx_t_26); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1371 + * except: + * try: + * pydev_log.exception() # <<<<<<<<<<<<<< + * info.pydev_original_step_cmd = -1 + * info.pydev_step_cmd = -1 + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1371, __pyx_L272_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1371, __pyx_L272_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_8 = (__pyx_t_4) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1371, __pyx_L272_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1372 + * try: + * pydev_log.exception() + * info.pydev_original_step_cmd = -1 # <<<<<<<<<<<<<< + * info.pydev_step_cmd = -1 + * info.pydev_step_stop = None + */ + __pyx_v_info->pydev_original_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1373 + * pydev_log.exception() + * info.pydev_original_step_cmd = -1 + * info.pydev_step_cmd = -1 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None + * except: + */ + __pyx_v_info->pydev_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1374 + * info.pydev_original_step_cmd = -1 + * info.pydev_step_cmd = -1 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * except: + * return None if is_call else NO_FTRACE + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1370 + * raise + * except: + * try: # <<<<<<<<<<<<<< + * pydev_log.exception() + * info.pydev_original_step_cmd = -1 + */ + } + __Pyx_XDECREF(__pyx_t_28); __pyx_t_28 = 0; + __Pyx_XDECREF(__pyx_t_27); __pyx_t_27 = 0; + __Pyx_XDECREF(__pyx_t_26); __pyx_t_26 = 0; + goto __pyx_L279_try_end; + __pyx_L272_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1375 + * info.pydev_step_cmd = -1 + * info.pydev_step_stop = None + * except: # <<<<<<<<<<<<<< + * return None if is_call else NO_FTRACE + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_1, &__pyx_t_4) < 0) __PYX_ERR(0, 1375, __pyx_L274_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_4); + + /* "_pydevd_bundle/pydevd_cython.pyx":1376 + * info.pydev_step_stop = None + * except: + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * # if we are quitting, let's stop the tracing + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_2 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_30, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_30)) __PYX_ERR(0, 1376, __pyx_L274_except_error) + __Pyx_GOTREF(__pyx_t_30); + __pyx_t_2 = __pyx_t_30; + __pyx_t_30 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L275_except_return; + } + __pyx_L274_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1370 + * raise + * except: + * try: # <<<<<<<<<<<<<< + * pydev_log.exception() + * info.pydev_original_step_cmd = -1 + */ + __Pyx_XGIVEREF(__pyx_t_28); + __Pyx_XGIVEREF(__pyx_t_27); + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_ExceptionReset(__pyx_t_28, __pyx_t_27, __pyx_t_26); + goto __pyx_L173_except_error; + 
__pyx_L275_except_return:; + __Pyx_XGIVEREF(__pyx_t_28); + __Pyx_XGIVEREF(__pyx_t_27); + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_ExceptionReset(__pyx_t_28, __pyx_t_27, __pyx_t_26); + goto __pyx_L174_except_return; + __pyx_L279_try_end:; + } + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L172_exception_handled; + } + __pyx_L173_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1146 + * + * # step handling. We stop when we hit the right frame + * try: # <<<<<<<<<<<<<< + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + */ + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + goto __pyx_L4_error; + __pyx_L175_try_return:; + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + goto __pyx_L3_return; + __pyx_L174_except_return:; + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + goto __pyx_L3_return; + __pyx_L172_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + __pyx_L176_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1379 + * + * # if we are quitting, let's stop the tracing + * if main_debugger.quitting: # <<<<<<<<<<<<<< + * return None if is_call else NO_FTRACE + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_quitting); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1379, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1379, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1380 + * # if we are quitting, let's stop the tracing + * if main_debugger.quitting: + * return None if is_call else NO_FTRACE # <<<<<<<<<<<<<< + * + * return self.trace_dispatch + */ + __Pyx_XDECREF(__pyx_r); + if ((__pyx_v_is_call != 0)) { + __Pyx_INCREF(Py_None); + __pyx_t_7 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1380, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1379 + * + * # if we are quitting, let's stop the tracing + * if main_debugger.quitting: # <<<<<<<<<<<<<< + * return None if is_call else NO_FTRACE + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1382 + * return None if is_call else NO_FTRACE + * + * return self.trace_dispatch # <<<<<<<<<<<<<< + * finally: + * info.is_tracing -= 1 + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1382, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L3_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1384 + * return self.trace_dispatch + * finally: + * info.is_tracing -= 1 # <<<<<<<<<<<<<< + * + * # end trace_dispatch + */ + /*finally:*/ { + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_18 = 0; 
__pyx_t_17 = 0; __pyx_t_16 = 0; __pyx_t_26 = 0; __pyx_t_27 = 0; __pyx_t_28 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_30); __pyx_t_30 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_26, &__pyx_t_27, &__pyx_t_28); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_18, &__pyx_t_17, &__pyx_t_16) < 0)) __Pyx_ErrFetch(&__pyx_t_18, &__pyx_t_17, &__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_26); + __Pyx_XGOTREF(__pyx_t_27); + __Pyx_XGOTREF(__pyx_t_28); + __pyx_t_10 = __pyx_lineno; __pyx_t_5 = __pyx_clineno; __pyx_t_31 = __pyx_filename; + { + __pyx_v_info->is_tracing = (__pyx_v_info->is_tracing - 1); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_XGIVEREF(__pyx_t_27); + __Pyx_XGIVEREF(__pyx_t_28); + __Pyx_ExceptionReset(__pyx_t_26, __pyx_t_27, __pyx_t_28); + } + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ErrRestore(__pyx_t_18, __pyx_t_17, __pyx_t_16); + __pyx_t_18 = 0; __pyx_t_17 = 0; __pyx_t_16 = 0; __pyx_t_26 = 0; __pyx_t_27 = 0; __pyx_t_28 = 0; + __pyx_lineno = __pyx_t_10; __pyx_clineno = __pyx_t_5; __pyx_filename = __pyx_t_31; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_28 = __pyx_r; + __pyx_r = 0; + __pyx_v_info->is_tracing = (__pyx_v_info->is_tracing - 1); + __pyx_r = __pyx_t_28; + __pyx_t_28 = 0; + goto __pyx_L0; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":700 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cpdef trace_dispatch(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef tuple abs_path_canonical_path_and_base; + * cdef bint is_exception_event; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_21); + __Pyx_XDECREF(__pyx_t_30); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_abs_path_canonical_path_and_base); + __Pyx_XDECREF((PyObject *)__pyx_v_info); + __Pyx_XDECREF(__pyx_v_breakpoints_for_file); + __Pyx_XDECREF(__pyx_v_stop_info); + __Pyx_XDECREF(__pyx_v_curr_func_name); + __Pyx_XDECREF(__pyx_v_frame_skips_cache); + __Pyx_XDECREF(__pyx_v_frame_cache_key); + __Pyx_XDECREF(__pyx_v_line_cache_key); + __Pyx_XDECREF(__pyx_v_bp); + __Pyx_XDECREF(__pyx_v_pydev_smart_step_into_variants); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_thread); + __Pyx_XDECREF(__pyx_v_plugin_manager); + __Pyx_XDECREF(__pyx_v_stop_frame); + __Pyx_XDECREF(__pyx_v_function_breakpoint_on_call_event); + __Pyx_XDECREF(__pyx_v_returns_cache_key); + __Pyx_XDECREF(__pyx_v_return_lines); + __Pyx_XDECREF(__pyx_v_x); + __Pyx_XDECREF(__pyx_v_f); + __Pyx_XDECREF(__pyx_v_func_lines); + __Pyx_XDECREF(__pyx_v_offset_and_lineno); + __Pyx_XDECREF(__pyx_v_flag); + __Pyx_XDECREF(__pyx_v_breakpoint); + __Pyx_XDECREF(__pyx_v_stop_reason); + __Pyx_XDECREF(__pyx_v_bp_type); + 
__Pyx_XDECREF(__pyx_v_new_frame); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XDECREF(__pyx_v_cmd); + __Pyx_XDECREF(__pyx_v_eval_result); + __Pyx_XDECREF(__pyx_v_plugin_stop); + __Pyx_XDECREF(__pyx_v_force_check_project_scope); + __Pyx_XDECREF(__pyx_v_filename); + __Pyx_XDECREF(__pyx_v_f2); + __Pyx_XDECREF(__pyx_v_back); + __Pyx_XDECREF(__pyx_v_smart_step_into_variant); + __Pyx_XDECREF(__pyx_v_children_variants); + __Pyx_XDECREF(__pyx_v_f_code); + __Pyx_XDECREF(__pyx_v_stopped_on_plugin); + __Pyx_XDECREF(__pyx_v_back_absolute_filename); + __Pyx_XDECREF(__pyx_v__); + __Pyx_XDECREF(__pyx_v_base); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11trace_dispatch(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11trace_dispatch(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_dispatch (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 3, 3, 1); __PYX_ERR(0, 700, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 3, 3, 2); __PYX_ERR(0, 700, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_dispatch") < 0)) __PYX_ERR(0, 700, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 700, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if 
(unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 700, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_10trace_dispatch(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_10trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("trace_dispatch", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_13__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_13__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_12__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_12__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._args, self.exc_info, self.should_skip) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->should_skip); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + 
PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_self->exc_info); + __Pyx_GIVEREF(__pyx_v_self->exc_info); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_self->exc_info); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._args, self.exc_info, self.should_skip) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v__dict = __pyx_t_2; + __pyx_t_2 = 0; + + /* "(tree fragment)":7 + * state = (self._args, self.exc_info, self.should_skip) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_3 = (__pyx_v__dict != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); + __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._args is not None or self.exc_info is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._args, self.exc_info, self.should_skip) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._args is not None or self.exc_info is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->_args != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->exc_info != Py_None); + __pyx_t_3 = (__pyx_t_5 != 0); + __pyx_t_4 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_4; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None or self.exc_info is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, None), state + * else: + */ + __pyx_t_4 = (__pyx_v_use_setstate != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":13 + * use_setstate = self._args is not None or self.exc_info is not None + * if use_setstate: + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_1, 
__pyx_n_s_pyx_unpickle_PyDBFrame); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_84338306); + __Pyx_GIVEREF(__pyx_int_84338306); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_84338306); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None or self.exc_info is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, None), state + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_PyDBFrame); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_84338306); + __Pyx_GIVEREF(__pyx_int_84338306); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_84338306); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); + __pyx_t_6 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + 
* __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_15__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_15__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_14__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_14__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0x506e682, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1422 + * + * + * def notify_skipped_step_in_because_of_filters(py_db, frame): # <<<<<<<<<<<<<< + * global _global_notify_skipped_step_in + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_5notify_skipped_step_in_because_of_filters(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_5notify_skipped_step_in_because_of_filters = {"notify_skipped_step_in_because_of_filters", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_5notify_skipped_step_in_because_of_filters, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_5notify_skipped_step_in_because_of_filters(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_py_db = 0; + PyObject *__pyx_v_frame = 0; + int __pyx_lineno = 0; + const 
char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("notify_skipped_step_in_because_of_filters (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_py_db,&__pyx_n_s_frame,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_py_db)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("notify_skipped_step_in_because_of_filters", 1, 2, 2, 1); __PYX_ERR(0, 1422, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "notify_skipped_step_in_because_of_filters") < 0)) __PYX_ERR(0, 1422, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_py_db = values[0]; + __pyx_v_frame = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("notify_skipped_step_in_because_of_filters", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1422, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.notify_skipped_step_in_because_of_filters", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_4notify_skipped_step_in_because_of_filters(__pyx_self, __pyx_v_py_db, __pyx_v_frame); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_4notify_skipped_step_in_because_of_filters(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_frame) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("notify_skipped_step_in_because_of_filters", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1425 + * global _global_notify_skipped_step_in + * + * with _global_notify_skipped_step_in_lock: # <<<<<<<<<<<<<< + * if _global_notify_skipped_step_in: + * # Check with lock in place (callers should actually have checked + */ + /*with:*/ { + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_global_notify_skipped_step_in_l); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1425, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_LookupSpecial(__pyx_t_1, __pyx_n_s_exit); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 1425, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_LookupSpecial(__pyx_t_1, __pyx_n_s_enter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1425, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1425, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + /*try:*/ { + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_6, &__pyx_t_7, &__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1426 + * + * with _global_notify_skipped_step_in_lock: + * if _global_notify_skipped_step_in: # <<<<<<<<<<<<<< + * # Check with lock in place (callers should actually have checked + * # before without the lock in place due to performance). + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1426, __pyx_L7_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1429 + * # Check with lock in place (callers should actually have checked + * # before without the lock in place due to performance). + * return # <<<<<<<<<<<<<< + * _global_notify_skipped_step_in = True + * py_db.notify_skipped_step_in_because_of_filters(frame) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1426 + * + * with _global_notify_skipped_step_in_lock: + * if _global_notify_skipped_step_in: # <<<<<<<<<<<<<< + * # Check with lock in place (callers should actually have checked + * # before without the lock in place due to performance). + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1430 + * # before without the lock in place due to performance). 
+ * return + * _global_notify_skipped_step_in = True # <<<<<<<<<<<<<< + * py_db.notify_skipped_step_in_because_of_filters(frame) + * + */ + __Pyx_INCREF(Py_True); + __Pyx_XGOTREF(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in); + __Pyx_DECREF_SET(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in, ((PyObject*)Py_True)); + __Pyx_GIVEREF(Py_True); + + /* "_pydevd_bundle/pydevd_cython.pyx":1431 + * return + * _global_notify_skipped_step_in = True + * py_db.notify_skipped_step_in_because_of_filters(frame) # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_notify_skipped_step_in_because_o); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1431, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1431, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1425 + * global _global_notify_skipped_step_in + * + * with _global_notify_skipped_step_in_lock: # <<<<<<<<<<<<<< + * if _global_notify_skipped_step_in: + * # Check with lock in place (callers should actually have checked + */ + } + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L12_try_end; + __pyx_L7_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.notify_skipped_step_in_because_of_filters", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_3, &__pyx_t_4) < 0) __PYX_ERR(0, 1425, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PyTuple_Pack(3, __pyx_t_1, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1425, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 1425, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_10); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (__pyx_t_9 < 0) __PYX_ERR(0, 1425, __pyx_L9_except_error) + __pyx_t_11 = ((!(__pyx_t_9 != 0)) != 0); + if (__pyx_t_11) { + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ErrRestoreWithState(__pyx_t_1, __pyx_t_3, __pyx_t_4); + __pyx_t_1 = 0; __pyx_t_3 = 0; __pyx_t_4 = 0; + __PYX_ERR(0, 1425, __pyx_L9_except_error) + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L8_exception_handled; + } + __pyx_L9_except_error:; + 
__Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); + goto __pyx_L1_error; + __pyx_L11_try_return:; + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); + goto __pyx_L4_return; + __pyx_L8_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); + __pyx_L12_try_end:; + } + } + /*finally:*/ { + /*normal exit:*/{ + if (__pyx_t_2) { + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__2, NULL); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1425, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + goto __pyx_L6; + } + __pyx_L4_return: { + __pyx_t_8 = __pyx_r; + __pyx_r = 0; + if (__pyx_t_2) { + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__2, NULL); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1425, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L0; + } + __pyx_L6:; + } + goto __pyx_L17; + __pyx_L3_error:; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L1_error; + __pyx_L17:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1422 + * + * + * def notify_skipped_step_in_because_of_filters(py_db, frame): # <<<<<<<<<<<<<< + * global _global_notify_skipped_step_in + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.notify_skipped_step_in_because_of_filters", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1436 + * cdef class SafeCallWrapper: + * cdef method_object + * def __init__(self, method_object): # <<<<<<<<<<<<<< + * self.method_object = method_object + * def __call__(self, *args): + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_method_object = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_method_object,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_method_object)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, 
__pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 1436, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_method_object = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1436, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self), __pyx_v_method_object); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v_method_object) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1437 + * cdef method_object + * def __init__(self, method_object): + * self.method_object = method_object # <<<<<<<<<<<<<< + * def __call__(self, *args): + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + */ + __Pyx_INCREF(__pyx_v_method_object); + __Pyx_GIVEREF(__pyx_v_method_object); + __Pyx_GOTREF(__pyx_v_self->method_object); + __Pyx_DECREF(__pyx_v_self->method_object); + __pyx_v_self->method_object = __pyx_v_method_object; + + /* "_pydevd_bundle/pydevd_cython.pyx":1436 + * cdef class SafeCallWrapper: + * cdef method_object + * def __init__(self, method_object): # <<<<<<<<<<<<<< + * self.method_object = method_object + * def __call__(self, *args): + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1438 + * def __init__(self, method_object): + * self.method_object = method_object + * def __call__(self, *args): # <<<<<<<<<<<<<< + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + * #in the frame, and that reference might get destroyed by set trace on frame and parents + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__call__", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_2__call__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_2__call__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v_args) { + PyObject *__pyx_v_method_obj; + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__call__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1441 + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + * #in the frame, and that reference might get destroyed by set trace on frame and parents + * cdef PyObject* method_obj = self.method_object # <<<<<<<<<<<<<< + * Py_INCREF(method_obj) + * ret = (method_obj)(*args) + */ + __pyx_v_method_obj = ((PyObject *)__pyx_v_self->method_object); + + /* "_pydevd_bundle/pydevd_cython.pyx":1442 + * #in the frame, and that reference might get destroyed by set trace on frame and parents + * cdef PyObject* method_obj = self.method_object + * Py_INCREF(method_obj) # <<<<<<<<<<<<<< + * ret = (method_obj)(*args) + * Py_XDECREF (method_obj) + */ + Py_INCREF(((PyObject *)__pyx_v_method_obj)); + + /* "_pydevd_bundle/pydevd_cython.pyx":1443 + * cdef PyObject* method_obj = self.method_object + * Py_INCREF(method_obj) + * ret = (method_obj)(*args) # <<<<<<<<<<<<<< + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None + */ + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_v_method_obj), __pyx_v_args, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1443, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_ret = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1444 + * Py_INCREF(method_obj) + * ret = (method_obj)(*args) + * Py_XDECREF (method_obj) # <<<<<<<<<<<<<< + * return SafeCallWrapper(ret) if ret is not None else None + * def get_method_object(self): + */ + Py_XDECREF(__pyx_v_method_obj); + + /* "_pydevd_bundle/pydevd_cython.pyx":1445 + * ret = (method_obj)(*args) + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None # <<<<<<<<<<<<<< + * def get_method_object(self): + * return self.method_object + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = (__pyx_v_ret != Py_None); + if ((__pyx_t_2 != 0)) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_v_ret); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1445, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __pyx_t_3; + __pyx_t_3 = 0; + } else { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1438 + * def __init__(self, method_object): + * self.method_object = method_object + * def __call__(self, *args): # <<<<<<<<<<<<<< + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + * #in the frame, and that reference might get destroyed by set trace on frame and parents + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* 
"_pydevd_bundle/pydevd_cython.pyx":1446 + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None + * def get_method_object(self): # <<<<<<<<<<<<<< + * return self.method_object + * # ELSE + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_5get_method_object(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_5get_method_object(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_method_object (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_4get_method_object(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_4get_method_object(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_method_object", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1447 + * return SafeCallWrapper(ret) if ret is not None else None + * def get_method_object(self): + * return self.method_object # <<<<<<<<<<<<<< + * # ELSE + * # ENDIF + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->method_object); + __pyx_r = __pyx_v_self->method_object; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1446 + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None + * def get_method_object(self): # <<<<<<<<<<<<<< + * return self.method_object + * # ELSE + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_6__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_6__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.method_object,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if 
_dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->method_object); + __Pyx_GIVEREF(__pyx_v_self->method_object); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->method_object); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.method_object,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self.method_object,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.method_object is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.method_object,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.method_object is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->method_object != Py_None); + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.method_object is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self.method_object is not None + * if use_setstate: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_SafeCallWrapper); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 
0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_125568891); + __Pyx_GIVEREF(__pyx_int_125568891); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_125568891); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.method_object is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_SafeCallWrapper); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_125568891); + __Pyx_GIVEREF(__pyx_int_125568891); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_125568891); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_9__setstate_cython__(PyObject 
*__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_8__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_8__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1452 + * + * + * def fix_top_level_trace_and_get_trace_func(py_db, frame): # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef str filename; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_7fix_top_level_trace_and_get_trace_func(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_7fix_top_level_trace_and_get_trace_func = {"fix_top_level_trace_and_get_trace_func", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_7fix_top_level_trace_and_get_trace_func, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_7fix_top_level_trace_and_get_trace_func(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_py_db = 0; + PyObject *__pyx_v_frame = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("fix_top_level_trace_and_get_trace_func (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_py_db,&__pyx_n_s_frame,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_py_db)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("fix_top_level_trace_and_get_trace_func", 1, 2, 2, 1); __PYX_ERR(0, 1452, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "fix_top_level_trace_and_get_trace_func") < 0)) __PYX_ERR(0, 1452, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_py_db = values[0]; + __pyx_v_frame = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("fix_top_level_trace_and_get_trace_func", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1452, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.fix_top_level_trace_and_get_trace_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_6fix_top_level_trace_and_get_trace_func(__pyx_self, __pyx_v_py_db, __pyx_v_frame); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_6fix_top_level_trace_and_get_trace_func(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_name = 0; + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_thread = NULL; + PyObject *__pyx_v_f_unhandled = NULL; + int __pyx_v_force_only_unhandled_tracer; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_j = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_additional_info = NULL; + PyObject *__pyx_v_top_level_thread_tracer = NULL; + PyObject *__pyx_v_f_trace = NULL; + PyObject *__pyx_v_thread_tracer = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + Py_ssize_t __pyx_t_5; + Py_ssize_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + int __pyx_t_15; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("fix_top_level_trace_and_get_trace_func", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1463 + * # where more information is cached (and will also setup the tracing for + * # frames where we should deal with unhandled exceptions). 
+ * thread = None # <<<<<<<<<<<<<< + * # Cache the frame which should be traced to deal with unhandled exceptions. + * # (i.e.: thread entry-points). + */ + __Pyx_INCREF(Py_None); + __pyx_v_thread = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1467 + * # (i.e.: thread entry-points). + * + * f_unhandled = frame # <<<<<<<<<<<<<< + * # print('called at', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + * force_only_unhandled_tracer = False + */ + __Pyx_INCREF(__pyx_v_frame); + __pyx_v_f_unhandled = __pyx_v_frame; + + /* "_pydevd_bundle/pydevd_cython.pyx":1469 + * f_unhandled = frame + * # print('called at', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + * force_only_unhandled_tracer = False # <<<<<<<<<<<<<< + * while f_unhandled is not None: + * # name = splitext(basename(f_unhandled.f_code.co_filename))[0] + */ + __pyx_v_force_only_unhandled_tracer = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1470 + * # print('called at', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + * force_only_unhandled_tracer = False + * while f_unhandled is not None: # <<<<<<<<<<<<<< + * # name = splitext(basename(f_unhandled.f_code.co_filename))[0] + * + */ + while (1) { + __pyx_t_1 = (__pyx_v_f_unhandled != Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (!__pyx_t_2) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1473 + * # name = splitext(basename(f_unhandled.f_code.co_filename))[0] + * + * name = f_unhandled.f_code.co_filename # <<<<<<<<<<<<<< + * # basename + * i = name.rfind('/') + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 1473, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_name, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1475 + * name = f_unhandled.f_code.co_filename + * # basename + * i = name.rfind('/') # <<<<<<<<<<<<<< + * j = name.rfind('\\') + * if j > i: + */ + __pyx_t_4 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyString_Type_rfind, __pyx_v_name, __pyx_kp_s__5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1475, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1476 + * # basename + * i = name.rfind('/') + * j = name.rfind('\\') # <<<<<<<<<<<<<< + * if j > i: + * i = j + */ + __pyx_t_4 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyString_Type_rfind, __pyx_v_name, __pyx_kp_s__6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1476, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_j, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1477 + * i = name.rfind('/') + * j = name.rfind('\\') + * if j > i: # <<<<<<<<<<<<<< + * i = j + * if i >= 0: + */ + __pyx_t_4 = PyObject_RichCompare(__pyx_v_j, __pyx_v_i, Py_GT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1477, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) 
__PYX_ERR(0, 1477, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1478 + * j = name.rfind('\\') + * if j > i: + * i = j # <<<<<<<<<<<<<< + * if i >= 0: + * name = name[i + 1:] + */ + __Pyx_INCREF(__pyx_v_j); + __Pyx_DECREF_SET(__pyx_v_i, __pyx_v_j); + + /* "_pydevd_bundle/pydevd_cython.pyx":1477 + * i = name.rfind('/') + * j = name.rfind('\\') + * if j > i: # <<<<<<<<<<<<<< + * i = j + * if i >= 0: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1479 + * if j > i: + * i = j + * if i >= 0: # <<<<<<<<<<<<<< + * name = name[i + 1:] + * # remove ext + */ + __pyx_t_4 = PyObject_RichCompare(__pyx_v_i, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1479, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1479, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1480 + * i = j + * if i >= 0: + * name = name[i + 1:] # <<<<<<<<<<<<<< + * # remove ext + * i = name.rfind('.') + */ + if (unlikely(__pyx_v_name == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1480, __pyx_L1_error) + } + __pyx_t_4 = __Pyx_PyInt_AddObjC(__pyx_v_i, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1480, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_2 = (__pyx_t_4 == Py_None); + if (__pyx_t_2) { + __pyx_t_5 = 0; + } else { + __pyx_t_6 = __Pyx_PyIndex_AsSsize_t(__pyx_t_4); if (unlikely((__pyx_t_6 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1480, __pyx_L1_error) + __pyx_t_5 = __pyx_t_6; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PySequence_GetSlice(__pyx_v_name, __pyx_t_5, PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1480, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_name, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1479 + * if j > i: + * i = j + * if i >= 0: # <<<<<<<<<<<<<< + * name = name[i + 1:] + * # remove ext + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1482 + * name = name[i + 1:] + * # remove ext + * i = name.rfind('.') # <<<<<<<<<<<<<< + * if i >= 0: + * name = name[:i] + */ + __pyx_t_4 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyString_Type_rfind, __pyx_v_name, __pyx_kp_s__7); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1482, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1483 + * # remove ext + * i = name.rfind('.') + * if i >= 0: # <<<<<<<<<<<<<< + * name = name[:i] + * + */ + __pyx_t_4 = PyObject_RichCompare(__pyx_v_i, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1483, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1483, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1484 + * i = name.rfind('.') + * if i >= 0: + * name = name[:i] # <<<<<<<<<<<<<< + * + * if name == 'threading': + */ + if (unlikely(__pyx_v_name == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1484, __pyx_L1_error) + } + __Pyx_INCREF(__pyx_v_i); + __pyx_t_4 = __pyx_v_i; + __pyx_t_2 = (__pyx_t_4 == Py_None); + if (__pyx_t_2) { + __pyx_t_5 = PY_SSIZE_T_MAX; + } else { + __pyx_t_6 = __Pyx_PyIndex_AsSsize_t(__pyx_t_4); 
if (unlikely((__pyx_t_6 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1484, __pyx_L1_error) + __pyx_t_5 = __pyx_t_6; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PySequence_GetSlice(__pyx_v_name, 0, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_name, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1483 + * # remove ext + * i = name.rfind('.') + * if i >= 0: # <<<<<<<<<<<<<< + * name = name[:i] + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1486 + * name = name[:i] + * + * if name == 'threading': # <<<<<<<<<<<<<< + * if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): + * # We need __bootstrap_inner, not __bootstrap. + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_name, __pyx_n_s_threading, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1486, __pyx_L1_error) + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1487 + * + * if name == 'threading': + * if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): # <<<<<<<<<<<<<< + * # We need __bootstrap_inner, not __bootstrap. + * return None, False + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1487, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_name); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1487, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_3, __pyx_n_s_bootstrap, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1487, __pyx_L1_error) + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_3, __pyx_n_s_bootstrap_2, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1487, __pyx_L1_error) + __pyx_t_1 = __pyx_t_2; + __pyx_L10_bool_binop_done:; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1489 + * if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): + * # We need __bootstrap_inner, not __bootstrap. + * return None, False # <<<<<<<<<<<<<< + * + * elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_tuple__8); + __pyx_r = __pyx_tuple__8; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1487 + * + * if name == 'threading': + * if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): # <<<<<<<<<<<<<< + * # We need __bootstrap_inner, not __bootstrap. + * return None, False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1491 + * return None, False + * + * elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): # <<<<<<<<<<<<<< + * # Note: be careful not to use threading.currentThread to avoid creating a dummy thread. 
+ * t = f_unhandled.f_locals.get('self') + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1491, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1491, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_bootstrap_inner, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1491, __pyx_L1_error) + if (!__pyx_t_1) { + } else { + __pyx_t_2 = __pyx_t_1; + goto __pyx_L12_bool_binop_done; + } + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_bootstrap_inner_2, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1491, __pyx_L1_error) + __pyx_t_2 = __pyx_t_1; + __pyx_L12_bool_binop_done:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1493 + * elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): + * # Note: be careful not to use threading.currentThread to avoid creating a dummy thread. + * t = f_unhandled.f_locals.get('self') # <<<<<<<<<<<<<< + * force_only_unhandled_tracer = True + * if t is not None and isinstance(t, threading.Thread): + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_locals); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_get); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_4 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_3, __pyx_n_s_self) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_n_s_self); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1494 + * # Note: be careful not to use threading.currentThread to avoid creating a dummy thread. 
+ * t = f_unhandled.f_locals.get('self') + * force_only_unhandled_tracer = True # <<<<<<<<<<<<<< + * if t is not None and isinstance(t, threading.Thread): + * thread = t + */ + __pyx_v_force_only_unhandled_tracer = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1495 + * t = f_unhandled.f_locals.get('self') + * force_only_unhandled_tracer = True + * if t is not None and isinstance(t, threading.Thread): # <<<<<<<<<<<<<< + * thread = t + * break + */ + __pyx_t_2 = (__pyx_v_t != Py_None); + __pyx_t_8 = (__pyx_t_2 != 0); + if (__pyx_t_8) { + } else { + __pyx_t_1 = __pyx_t_8; + goto __pyx_L15_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_threading); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1495, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_Thread); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1495, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = PyObject_IsInstance(__pyx_v_t, __pyx_t_7); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 1495, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_2 = (__pyx_t_8 != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L15_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1496 + * force_only_unhandled_tracer = True + * if t is not None and isinstance(t, threading.Thread): + * thread = t # <<<<<<<<<<<<<< + * break + * + */ + __Pyx_INCREF(__pyx_v_t); + __Pyx_DECREF_SET(__pyx_v_thread, __pyx_v_t); + + /* "_pydevd_bundle/pydevd_cython.pyx":1497 + * if t is not None and isinstance(t, threading.Thread): + * thread = t + * break # <<<<<<<<<<<<<< + * + * elif name == 'pydev_monkey': + */ + goto __pyx_L4_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1495 + * t = f_unhandled.f_locals.get('self') + * force_only_unhandled_tracer = True + * if t is not None and isinstance(t, threading.Thread): # <<<<<<<<<<<<<< + * thread = t + * break + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1491 + * return None, False + * + * elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): # <<<<<<<<<<<<<< + * # Note: be careful not to use threading.currentThread to avoid creating a dummy thread. + * t = f_unhandled.f_locals.get('self') + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1486 + * name = name[:i] + * + * if name == 'threading': # <<<<<<<<<<<<<< + * if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): + * # We need __bootstrap_inner, not __bootstrap. 
+ */ + goto __pyx_L8; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1499 + * break + * + * elif name == 'pydev_monkey': # <<<<<<<<<<<<<< + * if f_unhandled.f_code.co_name == '__call__': + * force_only_unhandled_tracer = True + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_name, __pyx_n_s_pydev_monkey, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1499, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1500 + * + * elif name == 'pydev_monkey': + * if f_unhandled.f_code.co_name == '__call__': # <<<<<<<<<<<<<< + * force_only_unhandled_tracer = True + * break + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_code); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1500, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_co_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1500, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_call_2, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1500, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1501 + * elif name == 'pydev_monkey': + * if f_unhandled.f_code.co_name == '__call__': + * force_only_unhandled_tracer = True # <<<<<<<<<<<<<< + * break + * + */ + __pyx_v_force_only_unhandled_tracer = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1502 + * if f_unhandled.f_code.co_name == '__call__': + * force_only_unhandled_tracer = True + * break # <<<<<<<<<<<<<< + * + * elif name == 'pydevd': + */ + goto __pyx_L4_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1500 + * + * elif name == 'pydev_monkey': + * if f_unhandled.f_code.co_name == '__call__': # <<<<<<<<<<<<<< + * force_only_unhandled_tracer = True + * break + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1499 + * break + * + * elif name == 'pydev_monkey': # <<<<<<<<<<<<<< + * if f_unhandled.f_code.co_name == '__call__': + * force_only_unhandled_tracer = True + */ + goto __pyx_L8; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1504 + * break + * + * elif name == 'pydevd': # <<<<<<<<<<<<<< + * if f_unhandled.f_code.co_name in ('run', 'main'): + * # We need to get to _exec + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_name, __pyx_n_s_pydevd, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1504, __pyx_L1_error) + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1505 + * + * elif name == 'pydevd': + * if f_unhandled.f_code.co_name in ('run', 'main'): # <<<<<<<<<<<<<< + * # We need to get to _exec + * return None, False + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1505, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1505, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_7, __pyx_n_s_run, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1505, __pyx_L1_error) + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L19_bool_binop_done; + } + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_7, __pyx_n_s_main, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1505, __pyx_L1_error) + __pyx_t_1 = __pyx_t_2; + __pyx_L19_bool_binop_done:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_2 = (__pyx_t_1 
!= 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1507 + * if f_unhandled.f_code.co_name in ('run', 'main'): + * # We need to get to _exec + * return None, False # <<<<<<<<<<<<<< + * + * if f_unhandled.f_code.co_name == '_exec': + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_tuple__8); + __pyx_r = __pyx_tuple__8; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1505 + * + * elif name == 'pydevd': + * if f_unhandled.f_code.co_name in ('run', 'main'): # <<<<<<<<<<<<<< + * # We need to get to _exec + * return None, False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1509 + * return None, False + * + * if f_unhandled.f_code.co_name == '_exec': # <<<<<<<<<<<<<< + * force_only_unhandled_tracer = True + * break + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_code); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1509, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_co_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1509, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_exec, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1509, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1510 + * + * if f_unhandled.f_code.co_name == '_exec': + * force_only_unhandled_tracer = True # <<<<<<<<<<<<<< + * break + * + */ + __pyx_v_force_only_unhandled_tracer = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1511 + * if f_unhandled.f_code.co_name == '_exec': + * force_only_unhandled_tracer = True + * break # <<<<<<<<<<<<<< + * + * elif name == 'pydevd_tracing': + */ + goto __pyx_L4_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1509 + * return None, False + * + * if f_unhandled.f_code.co_name == '_exec': # <<<<<<<<<<<<<< + * force_only_unhandled_tracer = True + * break + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1504 + * break + * + * elif name == 'pydevd': # <<<<<<<<<<<<<< + * if f_unhandled.f_code.co_name in ('run', 'main'): + * # We need to get to _exec + */ + goto __pyx_L8; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1513 + * break + * + * elif name == 'pydevd_tracing': # <<<<<<<<<<<<<< + * return None, False + * + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_name, __pyx_n_s_pydevd_tracing, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1513, __pyx_L1_error) + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1514 + * + * elif name == 'pydevd_tracing': + * return None, False # <<<<<<<<<<<<<< + * + * elif f_unhandled.f_back is None: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_tuple__8); + __pyx_r = __pyx_tuple__8; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1513 + * break + * + * elif name == 'pydevd_tracing': # <<<<<<<<<<<<<< + * return None, False + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1516 + * return None, False + * + * elif f_unhandled.f_back is None: # <<<<<<<<<<<<<< + * break + * + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1516, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = (__pyx_t_4 == Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1517 + * + * elif f_unhandled.f_back is None: + * break # <<<<<<<<<<<<<< + * + * f_unhandled = f_unhandled.f_back + */ + 
goto __pyx_L4_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":1516 + * return None, False + * + * elif f_unhandled.f_back is None: # <<<<<<<<<<<<<< + * break + * + */ + } + __pyx_L8:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1519 + * break + * + * f_unhandled = f_unhandled.f_back # <<<<<<<<<<<<<< + * + * if thread is None: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1519, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_f_unhandled, __pyx_t_4); + __pyx_t_4 = 0; + } + __pyx_L4_break:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1521 + * f_unhandled = f_unhandled.f_back + * + * if thread is None: # <<<<<<<<<<<<<< + * # Important: don't call threadingCurrentThread if we're in the threading module + * # to avoid creating dummy threads. + */ + __pyx_t_2 = (__pyx_v_thread == Py_None); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1524 + * # Important: don't call threadingCurrentThread if we're in the threading module + * # to avoid creating dummy threads. + * if py_db.threading_get_ident is not None: # <<<<<<<<<<<<<< + * thread = py_db.threading_active.get(py_db.threading_get_ident()) + * if thread is None: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_threading_get_ident); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1524, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = (__pyx_t_4 != Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1525 + * # to avoid creating dummy threads. + * if py_db.threading_get_ident is not None: + * thread = py_db.threading_active.get(py_db.threading_get_ident()) # <<<<<<<<<<<<<< + * if thread is None: + * return None, False + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_threading_active); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1525, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_get); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1525, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_threading_get_ident); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1525, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + __pyx_t_7 = (__pyx_t_10) ? __Pyx_PyObject_CallOneArg(__pyx_t_9, __pyx_t_10) : __Pyx_PyObject_CallNoArg(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1525, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_4 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1525, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_thread, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1526 + * if py_db.threading_get_ident is not None: + * thread = py_db.threading_active.get(py_db.threading_get_ident()) + * if thread is None: # <<<<<<<<<<<<<< + * return None, False + * else: + */ + __pyx_t_2 = (__pyx_v_thread == Py_None); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1527 + * thread = py_db.threading_active.get(py_db.threading_get_ident()) + * if thread is None: + * return None, False # <<<<<<<<<<<<<< + * else: + * # Jython does not have threading.get_ident(). + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_tuple__8); + __pyx_r = __pyx_tuple__8; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1526 + * if py_db.threading_get_ident is not None: + * thread = py_db.threading_active.get(py_db.threading_get_ident()) + * if thread is None: # <<<<<<<<<<<<<< + * return None, False + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1524 + * # Important: don't call threadingCurrentThread if we're in the threading module + * # to avoid creating dummy threads. + * if py_db.threading_get_ident is not None: # <<<<<<<<<<<<<< + * thread = py_db.threading_active.get(py_db.threading_get_ident()) + * if thread is None: + */ + goto __pyx_L23; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1530 + * else: + * # Jython does not have threading.get_ident(). + * thread = py_db.threading_current_thread() # <<<<<<<<<<<<<< + * + * if getattr(thread, 'pydev_do_not_trace', None): + */ + /*else*/ { + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_threading_current_thread); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1530, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_7) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1530, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_thread, __pyx_t_4); + __pyx_t_4 = 0; + } + __pyx_L23:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1521 + * f_unhandled = f_unhandled.f_back + * + * if thread is None: # <<<<<<<<<<<<<< + * # Important: don't call threadingCurrentThread if we're in the threading module + * # to avoid creating dummy threads. 
+ */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1532 + * thread = py_db.threading_current_thread() + * + * if getattr(thread, 'pydev_do_not_trace', None): # <<<<<<<<<<<<<< + * py_db.disable_tracing() + * return None, False + */ + __pyx_t_4 = __Pyx_GetAttr3(__pyx_v_thread, __pyx_n_s_pydev_do_not_trace, Py_None); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1532, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1532, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1533 + * + * if getattr(thread, 'pydev_do_not_trace', None): + * py_db.disable_tracing() # <<<<<<<<<<<<<< + * return None, False + * + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_disable_tracing); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1533, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_7) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1533, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1534 + * if getattr(thread, 'pydev_do_not_trace', None): + * py_db.disable_tracing() + * return None, False # <<<<<<<<<<<<<< + * + * try: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_tuple__8); + __pyx_r = __pyx_tuple__8; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1532 + * thread = py_db.threading_current_thread() + * + * if getattr(thread, 'pydev_do_not_trace', None): # <<<<<<<<<<<<<< + * py_db.disable_tracing() + * return None, False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1536 + * return None, False + * + * try: # <<<<<<<<<<<<<< + * additional_info = thread.additional_info + * if additional_info is None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1537 + * + * try: + * additional_info = thread.additional_info # <<<<<<<<<<<<<< + * if additional_info is None: + * raise AttributeError() + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread, __pyx_n_s_additional_info); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1537, __pyx_L26_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_additional_info = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1538 + * try: + * additional_info = thread.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + __pyx_t_1 = (__pyx_v_additional_info == Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (unlikely(__pyx_t_2)) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1539 + * additional_info = thread.additional_info + * if additional_info is None: + * raise AttributeError() # <<<<<<<<<<<<<< + * except: + * additional_info = py_db.set_additional_thread_info(thread) + */ + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_builtin_AttributeError); if 
(unlikely(!__pyx_t_4)) __PYX_ERR(0, 1539, __pyx_L26_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 1539, __pyx_L26_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1538 + * try: + * additional_info = thread.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1536 + * return None, False + * + * try: # <<<<<<<<<<<<<< + * additional_info = thread.additional_info + * if additional_info is None: + */ + } + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + goto __pyx_L31_try_end; + __pyx_L26_error:; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1540 + * if additional_info is None: + * raise AttributeError() + * except: # <<<<<<<<<<<<<< + * additional_info = py_db.set_additional_thread_info(thread) + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.fix_top_level_trace_and_get_trace_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_3, &__pyx_t_7) < 0) __PYX_ERR(0, 1540, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":1541 + * raise AttributeError() + * except: + * additional_info = py_db.set_additional_thread_info(thread) # <<<<<<<<<<<<<< + * + * # print('enter thread tracer', thread, get_current_thread_id(thread)) + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_set_additional_thread_info); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 1541, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + __pyx_t_9 = (__pyx_t_14) ? 
__Pyx_PyObject_Call2Args(__pyx_t_10, __pyx_t_14, __pyx_v_thread) : __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_v_thread); + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1541, __pyx_L28_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_additional_info, __pyx_t_9); + __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L27_exception_handled; + } + __pyx_L28_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1536 + * return None, False + * + * try: # <<<<<<<<<<<<<< + * additional_info = thread.additional_info + * if additional_info is None: + */ + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_12, __pyx_t_13); + goto __pyx_L1_error; + __pyx_L27_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_12, __pyx_t_13); + __pyx_L31_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1544 + * + * # print('enter thread tracer', thread, get_current_thread_id(thread)) + * args = (py_db, thread, additional_info, global_cache_skips, global_cache_frame_skips) # <<<<<<<<<<<<<< + * + * if f_unhandled is not None: + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_global_cache_skips); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1544, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_global_cache_frame_skips); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1544, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1544, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_additional_info); + __Pyx_GIVEREF(__pyx_v_additional_info); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_additional_info); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 4, __pyx_t_3); + __pyx_t_7 = 0; + __pyx_t_3 = 0; + __pyx_v_args = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1546 + * args = (py_db, thread, additional_info, global_cache_skips, global_cache_frame_skips) + * + * if f_unhandled is not None: # <<<<<<<<<<<<<< + * if f_unhandled.f_back is None and not force_only_unhandled_tracer: + * # Happens when we attach to a running program (cannot reuse instance because it's mutable). + */ + __pyx_t_2 = (__pyx_v_f_unhandled != Py_None); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1547 + * + * if f_unhandled is not None: + * if f_unhandled.f_back is None and not force_only_unhandled_tracer: # <<<<<<<<<<<<<< + * # Happens when we attach to a running program (cannot reuse instance because it's mutable). 
+ * top_level_thread_tracer = TopLevelThreadTracerNoBackFrame(ThreadTracer(args), args) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1547, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_2 = (__pyx_t_4 == Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = (__pyx_t_2 != 0); + if (__pyx_t_8) { + } else { + __pyx_t_1 = __pyx_t_8; + goto __pyx_L37_bool_binop_done; + } + __pyx_t_8 = ((!(__pyx_v_force_only_unhandled_tracer != 0)) != 0); + __pyx_t_1 = __pyx_t_8; + __pyx_L37_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1549 + * if f_unhandled.f_back is None and not force_only_unhandled_tracer: + * # Happens when we attach to a running program (cannot reuse instance because it's mutable). + * top_level_thread_tracer = TopLevelThreadTracerNoBackFrame(ThreadTracer(args), args) # <<<<<<<<<<<<<< + * additional_info.top_level_thread_tracer_no_back_frames.append(top_level_thread_tracer) # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + * else: + */ + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_v_args); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1549, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1549, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_args); + __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame), __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1549, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_top_level_thread_tracer = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1550 + * # Happens when we attach to a running program (cannot reuse instance because it's mutable). + * top_level_thread_tracer = TopLevelThreadTracerNoBackFrame(ThreadTracer(args), args) + * additional_info.top_level_thread_tracer_no_back_frames.append(top_level_thread_tracer) # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). # <<<<<<<<<<<<<< + * else: + * top_level_thread_tracer = additional_info.top_level_thread_tracer_unhandled + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_additional_info, __pyx_n_s_top_level_thread_tracer_no_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1550, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_15 = __Pyx_PyObject_Append(__pyx_t_4, __pyx_v_top_level_thread_tracer); if (unlikely(__pyx_t_15 == ((int)-1))) __PYX_ERR(0, 1550, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1547 + * + * if f_unhandled is not None: + * if f_unhandled.f_back is None and not force_only_unhandled_tracer: # <<<<<<<<<<<<<< + * # Happens when we attach to a running program (cannot reuse instance because it's mutable). 
+ * top_level_thread_tracer = TopLevelThreadTracerNoBackFrame(ThreadTracer(args), args) + */ + goto __pyx_L36; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1552 + * additional_info.top_level_thread_tracer_no_back_frames.append(top_level_thread_tracer) # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + * else: + * top_level_thread_tracer = additional_info.top_level_thread_tracer_unhandled # <<<<<<<<<<<<<< + * if top_level_thread_tracer is None: + * # Stop in some internal place to report about unhandled exceptions + */ + /*else*/ { + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_additional_info, __pyx_n_s_top_level_thread_tracer_unhandle); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1552, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_top_level_thread_tracer = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1553 + * else: + * top_level_thread_tracer = additional_info.top_level_thread_tracer_unhandled + * if top_level_thread_tracer is None: # <<<<<<<<<<<<<< + * # Stop in some internal place to report about unhandled exceptions + * top_level_thread_tracer = TopLevelThreadTracerOnlyUnhandledExceptions(args) + */ + __pyx_t_1 = (__pyx_v_top_level_thread_tracer == Py_None); + __pyx_t_8 = (__pyx_t_1 != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1555 + * if top_level_thread_tracer is None: + * # Stop in some internal place to report about unhandled exceptions + * top_level_thread_tracer = TopLevelThreadTracerOnlyUnhandledExceptions(args) # <<<<<<<<<<<<<< + * additional_info.top_level_thread_tracer_unhandled = top_level_thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + * + */ + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions), __pyx_v_args); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1555, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_top_level_thread_tracer, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1556 + * # Stop in some internal place to report about unhandled exceptions + * top_level_thread_tracer = TopLevelThreadTracerOnlyUnhandledExceptions(args) + * additional_info.top_level_thread_tracer_unhandled = top_level_thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). 
# <<<<<<<<<<<<<< + * + * # print(' --> found to trace unhandled', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_additional_info, __pyx_n_s_top_level_thread_tracer_unhandle, __pyx_v_top_level_thread_tracer) < 0) __PYX_ERR(0, 1556, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1553 + * else: + * top_level_thread_tracer = additional_info.top_level_thread_tracer_unhandled + * if top_level_thread_tracer is None: # <<<<<<<<<<<<<< + * # Stop in some internal place to report about unhandled exceptions + * top_level_thread_tracer = TopLevelThreadTracerOnlyUnhandledExceptions(args) + */ + } + } + __pyx_L36:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1559 + * + * # print(' --> found to trace unhandled', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + * f_trace = top_level_thread_tracer.get_trace_dispatch_func() # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * f_trace = SafeCallWrapper(f_trace) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_top_level_thread_tracer, __pyx_n_s_get_trace_dispatch_func); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1559, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_7) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1559, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_f_trace = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1561 + * f_trace = top_level_thread_tracer.get_trace_dispatch_func() + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * f_trace = SafeCallWrapper(f_trace) # <<<<<<<<<<<<<< + * # ENDIF + * f_unhandled.f_trace = f_trace + */ + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_v_f_trace); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1561, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_f_trace, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1563 + * f_trace = SafeCallWrapper(f_trace) + * # ENDIF + * f_unhandled.f_trace = f_trace # <<<<<<<<<<<<<< + * + * if frame is f_unhandled: + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_f_unhandled, __pyx_n_s_f_trace, __pyx_v_f_trace) < 0) __PYX_ERR(0, 1563, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1565 + * f_unhandled.f_trace = f_trace + * + * if frame is f_unhandled: # <<<<<<<<<<<<<< + * return f_trace, False + * + */ + __pyx_t_8 = (__pyx_v_frame == __pyx_v_f_unhandled); + __pyx_t_1 = (__pyx_t_8 != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1566 + * + * if frame is f_unhandled: + * return f_trace, False # <<<<<<<<<<<<<< + * + * thread_tracer = additional_info.thread_tracer + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1566, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_f_trace); + __Pyx_GIVEREF(__pyx_v_f_trace); + PyTuple_SET_ITEM(__pyx_t_4, 0, 
__pyx_v_f_trace); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + PyTuple_SET_ITEM(__pyx_t_4, 1, Py_False); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1565 + * f_unhandled.f_trace = f_trace + * + * if frame is f_unhandled: # <<<<<<<<<<<<<< + * return f_trace, False + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1546 + * args = (py_db, thread, additional_info, global_cache_skips, global_cache_frame_skips) + * + * if f_unhandled is not None: # <<<<<<<<<<<<<< + * if f_unhandled.f_back is None and not force_only_unhandled_tracer: + * # Happens when we attach to a running program (cannot reuse instance because it's mutable). + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1568 + * return f_trace, False + * + * thread_tracer = additional_info.thread_tracer # <<<<<<<<<<<<<< + * if thread_tracer is None or thread_tracer._args[0] is not py_db: + * thread_tracer = ThreadTracer(args) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_additional_info, __pyx_n_s_thread_tracer); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1568, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_thread_tracer = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1569 + * + * thread_tracer = additional_info.thread_tracer + * if thread_tracer is None or thread_tracer._args[0] is not py_db: # <<<<<<<<<<<<<< + * thread_tracer = ThreadTracer(args) + * additional_info.thread_tracer = thread_tracer + */ + __pyx_t_8 = (__pyx_v_thread_tracer == Py_None); + __pyx_t_2 = (__pyx_t_8 != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L42_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread_tracer, __pyx_n_s_args_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1569, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_4, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1569, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = (__pyx_t_3 != __pyx_v_py_db); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_8 = (__pyx_t_2 != 0); + __pyx_t_1 = __pyx_t_8; + __pyx_L42_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1570 + * thread_tracer = additional_info.thread_tracer + * if thread_tracer is None or thread_tracer._args[0] is not py_db: + * thread_tracer = ThreadTracer(args) # <<<<<<<<<<<<<< + * additional_info.thread_tracer = thread_tracer + * + */ + __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_v_args); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1570, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_thread_tracer, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1571 + * if thread_tracer is None or thread_tracer._args[0] is not py_db: + * thread_tracer = ThreadTracer(args) + * additional_info.thread_tracer = thread_tracer # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_additional_info, __pyx_n_s_thread_tracer, __pyx_v_thread_tracer) < 0) __PYX_ERR(0, 1571, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1569 + * + * thread_tracer = additional_info.thread_tracer + * if thread_tracer is None or thread_tracer._args[0] is not py_db: # <<<<<<<<<<<<<< + * thread_tracer = ThreadTracer(args) + * additional_info.thread_tracer = thread_tracer + */ + } + + /* 
"_pydevd_bundle/pydevd_cython.pyx":1574 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * return SafeCallWrapper(thread_tracer), True # <<<<<<<<<<<<<< + * # ELSE + * # return thread_tracer, True + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_v_thread_tracer); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1574, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1574, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_4, 1, Py_True); + __pyx_t_3 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1452 + * + * + * def fix_top_level_trace_and_get_trace_func(py_db, frame): # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef str filename; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.fix_top_level_trace_and_get_trace_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_name); + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_thread); + __Pyx_XDECREF(__pyx_v_f_unhandled); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_j); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_top_level_thread_tracer); + __Pyx_XDECREF(__pyx_v_f_trace); + __Pyx_XDECREF(__pyx_v_thread_tracer); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1580 + * + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_9trace_dispatch = {"trace_dispatch", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9trace_dispatch, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_py_db = 0; + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_dispatch (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_py_db,&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[4] = {0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = 
PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_py_db)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, 1); __PYX_ERR(0, 1580, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, 2); __PYX_ERR(0, 1580, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, 3); __PYX_ERR(0, 1580, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_dispatch") < 0)) __PYX_ERR(0, 1580, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + } + __pyx_v_py_db = values[0]; + __pyx_v_frame = values[1]; + __pyx_v_event = values[2]; + __pyx_v_arg = values[3]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1580, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_8trace_dispatch(__pyx_self, __pyx_v_py_db, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_8trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_thread_trace_func = NULL; + PyObject *__pyx_v_apply_to_settrace = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("trace_dispatch", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1581 + * + * def trace_dispatch(py_db, frame, event, arg): + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) # <<<<<<<<<<<<<< + * if thread_trace_func is None: + * return None if event == 'call' else NO_FTRACE + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_fix_top_level_trace_and_get_trac); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + 
__pyx_t_3 = NULL; + __pyx_t_4 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_py_db, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 2+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_py_db, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 2+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1581, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_2 = PyList_GET_ITEM(sequence, 0); + __pyx_t_5 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + #else + __pyx_t_2 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_3 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_6 = Py_TYPE(__pyx_t_3)->tp_iternext; + index = 0; __pyx_t_2 = __pyx_t_6(__pyx_t_3); if (unlikely(!__pyx_t_2)) goto __pyx_L3_unpacking_failed; + __Pyx_GOTREF(__pyx_t_2); + index = 1; __pyx_t_5 = __pyx_t_6(__pyx_t_3); if (unlikely(!__pyx_t_5)) goto __pyx_L3_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_3), 2) < 0) __PYX_ERR(0, 1581, __pyx_L1_error) + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + goto 
__pyx_L4_unpacking_done; + __pyx_L3_unpacking_failed:; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 1581, __pyx_L1_error) + __pyx_L4_unpacking_done:; + } + __pyx_v_thread_trace_func = __pyx_t_2; + __pyx_t_2 = 0; + __pyx_v_apply_to_settrace = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1582 + * def trace_dispatch(py_db, frame, event, arg): + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * if apply_to_settrace: + */ + __pyx_t_7 = (__pyx_v_thread_trace_func == Py_None); + __pyx_t_8 = (__pyx_t_7 != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1583 + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * if apply_to_settrace: + * py_db.enable_tracing(thread_trace_func) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 1583, __pyx_L1_error) + if (__pyx_t_8) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1583, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = __pyx_t_5; + __pyx_t_5 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1582 + * def trace_dispatch(py_db, frame, event, arg): + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * if apply_to_settrace: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1584 + * if thread_trace_func is None: + * return None if event == 'call' else NO_FTRACE + * if apply_to_settrace: # <<<<<<<<<<<<<< + * py_db.enable_tracing(thread_trace_func) + * return thread_trace_func(frame, event, arg) + */ + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_apply_to_settrace); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 1584, __pyx_L1_error) + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1585 + * return None if event == 'call' else NO_FTRACE + * if apply_to_settrace: + * py_db.enable_tracing(thread_trace_func) # <<<<<<<<<<<<<< + * return thread_trace_func(frame, event, arg) + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_enable_tracing); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_1 = (__pyx_t_2) ? 
__Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_2, __pyx_v_thread_trace_func) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_v_thread_trace_func); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1584 + * if thread_trace_func is None: + * return None if event == 'call' else NO_FTRACE + * if apply_to_settrace: # <<<<<<<<<<<<<< + * py_db.enable_tracing(thread_trace_func) + * return thread_trace_func(frame, event, arg) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1586 + * if apply_to_settrace: + * py_db.enable_tracing(thread_trace_func) + * return thread_trace_func(frame, event, arg) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_thread_trace_func); + __pyx_t_5 = __pyx_v_thread_trace_func; __pyx_t_2 = NULL; + __pyx_t_4 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_4 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[4] = {__pyx_t_2, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1586, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[4] = {__pyx_t_2, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1586, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_3 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1586, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_2) { + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2); __pyx_t_2 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_4, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_4, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_4, __pyx_v_arg); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1586, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1580 + * + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + 
__Pyx_XDECREF(__pyx_v_thread_trace_func); + __Pyx_XDECREF(__pyx_v_apply_to_settrace); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1592 + * cdef class TopLevelThreadTracerOnlyUnhandledExceptions: + * cdef public tuple _args; + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args + * # ELSE + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_args,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_args)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 1592, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_args = ((PyObject*)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1592, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_args), (&PyTuple_Type), 1, "args", 1))) __PYX_ERR(0, 1592, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self, PyObject *__pyx_v_args) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1593 + * cdef public tuple _args; + * def __init__(self, tuple args): + * self._args = args # <<<<<<<<<<<<<< + * # ELSE + * # class TopLevelThreadTracerOnlyUnhandledExceptions(object): + */ + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + 
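Reading aid, not part of the vendored file: the generated trace_dispatch wrapper above compiles this pure-Python function, quoted verbatim in the comments (pydevd_cython.pyx 1580-1586):

def trace_dispatch(py_db, frame, event, arg):
    thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame)
    if thread_trace_func is None:
        return None if event == 'call' else NO_FTRACE
    if apply_to_settrace:
        py_db.enable_tracing(thread_trace_func)
    return thread_trace_func(frame, event, arg)

NO_FTRACE here is a module-level no-op trace function defined elsewhere in the file; the 'call'/non-'call' distinction reflects how sys.settrace handlers are expected to return a local trace function only for call events.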
__Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = __pyx_v_args; + + /* "_pydevd_bundle/pydevd_cython.pyx":1592 + * cdef class TopLevelThreadTracerOnlyUnhandledExceptions: + * cdef public tuple _args; + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args + * # ELSE + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1601 + * # ENDIF + * + * def trace_unhandled_exceptions(self, frame, event, arg): # <<<<<<<<<<<<<< + * # Note that we ignore the frame as this tracing method should only be put in topmost frames already. + * # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_3trace_unhandled_exceptions(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_3trace_unhandled_exceptions(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_unhandled_exceptions (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_unhandled_exceptions", 1, 3, 3, 1); __PYX_ERR(0, 1601, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_unhandled_exceptions", 1, 3, 3, 2); __PYX_ERR(0, 1601, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_unhandled_exceptions") < 0)) __PYX_ERR(0, 1601, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = values[1]; + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_unhandled_exceptions", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1601, 
__pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions.trace_unhandled_exceptions", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_2trace_unhandled_exceptions(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_2trace_unhandled_exceptions(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_py_db = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_additional_info = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("trace_unhandled_exceptions", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1604 + * # Note that we ignore the frame as this tracing method should only be put in topmost frames already. + * # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + * if event == 'exception' and arg is not None: # <<<<<<<<<<<<<< + * py_db, t, additional_info = self._args[0:3] + * if arg is not None: + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1604, __pyx_L1_error) + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_arg != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + __pyx_t_1 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1605 + * # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + * if event == 'exception' and arg is not None: + * py_db, t, additional_info = self._args[0:3] # <<<<<<<<<<<<<< + * if arg is not None: + * if not additional_info.suspended_at_unhandled: + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1605, __pyx_L1_error) + } + __pyx_t_4 = __Pyx_PyTuple_GetSlice(__pyx_v_self->_args, 0, 3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (1) { + PyObject* sequence = __pyx_t_4; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1605, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + #else + 
__pyx_t_5 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_v_py_db = __pyx_t_5; + __pyx_t_5 = 0; + __pyx_v_t = __pyx_t_6; + __pyx_t_6 = 0; + __pyx_v_additional_info = __pyx_t_7; + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1606 + * if event == 'exception' and arg is not None: + * py_db, t, additional_info = self._args[0:3] + * if arg is not None: # <<<<<<<<<<<<<< + * if not additional_info.suspended_at_unhandled: + * additional_info.suspended_at_unhandled = True + */ + __pyx_t_1 = (__pyx_v_arg != Py_None); + __pyx_t_3 = (__pyx_t_1 != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1607 + * py_db, t, additional_info = self._args[0:3] + * if arg is not None: + * if not additional_info.suspended_at_unhandled: # <<<<<<<<<<<<<< + * additional_info.suspended_at_unhandled = True + * + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_additional_info, __pyx_n_s_suspended_at_unhandled); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1607, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1607, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_1 = ((!__pyx_t_3) != 0); + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1608 + * if arg is not None: + * if not additional_info.suspended_at_unhandled: + * additional_info.suspended_at_unhandled = True # <<<<<<<<<<<<<< + * + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, arg) + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_additional_info, __pyx_n_s_suspended_at_unhandled, Py_True) < 0) __PYX_ERR(0, 1608, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1610 + * additional_info.suspended_at_unhandled = True + * + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, arg) # <<<<<<<<<<<<<< + * + * # No need to reset frame.f_trace to keep the same trace function. 
+ */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_stop_on_unhandled_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1610, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[5] = {__pyx_t_6, __pyx_v_py_db, __pyx_v_t, __pyx_v_additional_info, __pyx_v_arg}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 4+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1610, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[5] = {__pyx_t_6, __pyx_v_py_db, __pyx_v_t, __pyx_v_additional_info, __pyx_v_arg}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 4+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1610, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_5 = PyTuple_New(4+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1610, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_8, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_8, __pyx_v_t); + __Pyx_INCREF(__pyx_v_additional_info); + __Pyx_GIVEREF(__pyx_v_additional_info); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_8, __pyx_v_additional_info); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_8, __pyx_v_arg); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1610, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1607 + * py_db, t, additional_info = self._args[0:3] + * if arg is not None: + * if not additional_info.suspended_at_unhandled: # <<<<<<<<<<<<<< + * additional_info.suspended_at_unhandled = True + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1606 + * if event == 'exception' and arg is not None: + * py_db, t, additional_info = self._args[0:3] + * if arg is not None: # <<<<<<<<<<<<<< + * if not additional_info.suspended_at_unhandled: + * additional_info.suspended_at_unhandled = True + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1604 + * # Note that we ignore the frame as this tracing method should only be put in topmost frames already. + * # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + * if event == 'exception' and arg is not None: # <<<<<<<<<<<<<< + * py_db, t, additional_info = self._args[0:3] + * if arg is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1613 + * + * # No need to reset frame.f_trace to keep the same trace function. 
+ * return self.trace_unhandled_exceptions # <<<<<<<<<<<<<< + * + * def get_trace_dispatch_func(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_unhandled_exceptions); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1613, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1601 + * # ENDIF + * + * def trace_unhandled_exceptions(self, frame, event, arg): # <<<<<<<<<<<<<< + * # Note that we ignore the frame as this tracing method should only be put in topmost frames already. + * # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions.trace_unhandled_exceptions", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_py_db); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1615 + * return self.trace_unhandled_exceptions + * + * def get_trace_dispatch_func(self): # <<<<<<<<<<<<<< + * return self.trace_unhandled_exceptions + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5get_trace_dispatch_func(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5get_trace_dispatch_func(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_trace_dispatch_func (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_4get_trace_dispatch_func(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_4get_trace_dispatch_func(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_trace_dispatch_func", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1616 + * + * def get_trace_dispatch_func(self): + * return self.trace_unhandled_exceptions # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_unhandled_exceptions); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1616, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1615 + * return self.trace_unhandled_exceptions + * + * def get_trace_dispatch_func(self): # <<<<<<<<<<<<<< + * return self.trace_unhandled_exceptions + * + */ + + /* function exit code */ + __pyx_L1_error:; + 
__Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions.get_trace_dispatch_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1591 + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class TopLevelThreadTracerOnlyUnhandledExceptions: + * cdef public tuple _args; # <<<<<<<<<<<<<< + * def __init__(self, tuple args): + * self._args = args + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_args); + __pyx_r = __pyx_v_self->_args; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 1591, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_args); + 
__Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions._args.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_6__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_6__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._args,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not 
None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_args); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._args is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._args is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->_args != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self._args is not None + * if use_setstate: + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_TopLevelThreadTra); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + 
PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64458794); + __Pyx_GIVEREF(__pyx_int_64458794); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64458794); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, None), state + * else: + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_TopLevelThreadTra); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64458794); + __Pyx_GIVEREF(__pyx_int_64458794); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64458794); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_8__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_8__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1627 + * cdef public set _raise_lines; + * cdef public int _last_raise_line; + * def __init__(self, frame_trace_dispatch, tuple args): # <<<<<<<<<<<<<< + * self._frame_trace_dispatch = frame_trace_dispatch + * self._args = args + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject 
*__pyx_v_frame_trace_dispatch = 0; + PyObject *__pyx_v_args = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame_trace_dispatch,&__pyx_n_s_args,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame_trace_dispatch)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_args)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 2, 2, 1); __PYX_ERR(0, 1627, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 1627, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_frame_trace_dispatch = values[0]; + __pyx_v_args = ((PyObject*)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1627, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_args), (&PyTuple_Type), 1, "args", 1))) __PYX_ERR(0, 1627, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), __pyx_v_frame_trace_dispatch, __pyx_v_args); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_frame_trace_dispatch, PyObject *__pyx_v_args) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1628 + * cdef public int _last_raise_line; + * def __init__(self, frame_trace_dispatch, tuple args): + * self._frame_trace_dispatch = frame_trace_dispatch # <<<<<<<<<<<<<< + * self._args = args + * self.try_except_infos = None + */ + __Pyx_INCREF(__pyx_v_frame_trace_dispatch); + __Pyx_GIVEREF(__pyx_v_frame_trace_dispatch); + __Pyx_GOTREF(__pyx_v_self->_frame_trace_dispatch); + __Pyx_DECREF(__pyx_v_self->_frame_trace_dispatch); + 
__pyx_v_self->_frame_trace_dispatch = __pyx_v_frame_trace_dispatch; + + /* "_pydevd_bundle/pydevd_cython.pyx":1629 + * def __init__(self, frame_trace_dispatch, tuple args): + * self._frame_trace_dispatch = frame_trace_dispatch + * self._args = args # <<<<<<<<<<<<<< + * self.try_except_infos = None + * self._last_exc_arg = None + */ + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = __pyx_v_args; + + /* "_pydevd_bundle/pydevd_cython.pyx":1630 + * self._frame_trace_dispatch = frame_trace_dispatch + * self._args = args + * self.try_except_infos = None # <<<<<<<<<<<<<< + * self._last_exc_arg = None + * self._raise_lines = set() + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->try_except_infos); + __Pyx_DECREF(__pyx_v_self->try_except_infos); + __pyx_v_self->try_except_infos = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1631 + * self._args = args + * self.try_except_infos = None + * self._last_exc_arg = None # <<<<<<<<<<<<<< + * self._raise_lines = set() + * self._last_raise_line = -1 + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_exc_arg); + __Pyx_DECREF(__pyx_v_self->_last_exc_arg); + __pyx_v_self->_last_exc_arg = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":1632 + * self.try_except_infos = None + * self._last_exc_arg = None + * self._raise_lines = set() # <<<<<<<<<<<<<< + * self._last_raise_line = -1 + * # ELSE + */ + __pyx_t_1 = PySet_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1632, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_raise_lines); + __Pyx_DECREF(__pyx_v_self->_raise_lines); + __pyx_v_self->_raise_lines = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1633 + * self._last_exc_arg = None + * self._raise_lines = set() + * self._last_raise_line = -1 # <<<<<<<<<<<<<< + * # ELSE + * # class TopLevelThreadTracerNoBackFrame(object): + */ + __pyx_v_self->_last_raise_line = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":1627 + * cdef public set _raise_lines; + * cdef public int _last_raise_line; + * def __init__(self, frame_trace_dispatch, tuple args): # <<<<<<<<<<<<<< + * self._frame_trace_dispatch = frame_trace_dispatch + * self._args = args + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1657 + * # ENDIF + * + * def trace_dispatch_and_unhandled_exceptions(self, frame, event, arg): # <<<<<<<<<<<<<< + * # DEBUG = 'code_to_debug' in frame.f_code.co_filename + * # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_3trace_dispatch_and_unhandled_exceptions(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_3trace_dispatch_and_unhandled_exceptions(PyObject *__pyx_v_self, PyObject 
*__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_dispatch_and_unhandled_exceptions (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch_and_unhandled_exceptions", 1, 3, 3, 1); __PYX_ERR(0, 1657, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch_and_unhandled_exceptions", 1, 3, 3, 2); __PYX_ERR(0, 1657, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_dispatch_and_unhandled_exceptions") < 0)) __PYX_ERR(0, 1657, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = values[1]; + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_dispatch_and_unhandled_exceptions", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1657, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.trace_dispatch_and_unhandled_exceptions", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_2trace_dispatch_and_unhandled_exceptions(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_2trace_dispatch_and_unhandled_exceptions(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_frame_trace_dispatch = NULL; + PyObject *__pyx_v_py_db = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_additional_info = NULL; + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_r = 
NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + char const *__pyx_t_11; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("trace_dispatch_and_unhandled_exceptions", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1660 + * # DEBUG = 'code_to_debug' in frame.f_code.co_filename + * # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + * frame_trace_dispatch = self._frame_trace_dispatch # <<<<<<<<<<<<<< + * if frame_trace_dispatch is not None: + * self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + */ + __pyx_t_1 = __pyx_v_self->_frame_trace_dispatch; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_frame_trace_dispatch = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1661 + * # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + * frame_trace_dispatch = self._frame_trace_dispatch + * if frame_trace_dispatch is not None: # <<<<<<<<<<<<<< + * self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + * + */ + __pyx_t_2 = (__pyx_v_frame_trace_dispatch != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1662 + * frame_trace_dispatch = self._frame_trace_dispatch + * if frame_trace_dispatch is not None: + * self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) # <<<<<<<<<<<<<< + * + * if event == 'exception': + */ + __Pyx_INCREF(__pyx_v_frame_trace_dispatch); + __pyx_t_4 = __pyx_v_frame_trace_dispatch; __pyx_t_5 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1662, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1662, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_7 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1662, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, 
__pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_6, __pyx_v_arg); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1662, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_frame_trace_dispatch); + __Pyx_DECREF(__pyx_v_self->_frame_trace_dispatch); + __pyx_v_self->_frame_trace_dispatch = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1661 + * # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + * frame_trace_dispatch = self._frame_trace_dispatch + * if frame_trace_dispatch is not None: # <<<<<<<<<<<<<< + * self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1664 + * self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + * + * if event == 'exception': # <<<<<<<<<<<<<< + * self._last_exc_arg = arg + * self._raise_lines.add(frame.f_lineno) + */ + __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1664, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1665 + * + * if event == 'exception': + * self._last_exc_arg = arg # <<<<<<<<<<<<<< + * self._raise_lines.add(frame.f_lineno) + * self._last_raise_line = frame.f_lineno + */ + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + __Pyx_GOTREF(__pyx_v_self->_last_exc_arg); + __Pyx_DECREF(__pyx_v_self->_last_exc_arg); + __pyx_v_self->_last_exc_arg = __pyx_v_arg; + + /* "_pydevd_bundle/pydevd_cython.pyx":1666 + * if event == 'exception': + * self._last_exc_arg = arg + * self._raise_lines.add(frame.f_lineno) # <<<<<<<<<<<<<< + * self._last_raise_line = frame.f_lineno + * + */ + if (unlikely(__pyx_v_self->_raise_lines == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "add"); + __PYX_ERR(0, 1666, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1666, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = PySet_Add(__pyx_v_self->_raise_lines, __pyx_t_1); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 1666, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1667 + * self._last_exc_arg = arg + * self._raise_lines.add(frame.f_lineno) + * self._last_raise_line = frame.f_lineno # <<<<<<<<<<<<<< + * + * elif event == 'return' and self._last_exc_arg is not None: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1667, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1667, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->_last_raise_line = __pyx_t_6; + + /* "_pydevd_bundle/pydevd_cython.pyx":1664 + * 
self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + * + * if event == 'exception': # <<<<<<<<<<<<<< + * self._last_exc_arg = arg + * self._raise_lines.add(frame.f_lineno) + */ + goto __pyx_L4; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1669 + * self._last_raise_line = frame.f_lineno + * + * elif event == 'return' and self._last_exc_arg is not None: # <<<<<<<<<<<<<< + * # For unhandled exceptions we actually track the return when at the topmost level. + * try: + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_return, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1669, __pyx_L1_error) + if (__pyx_t_2) { + } else { + __pyx_t_3 = __pyx_t_2; + goto __pyx_L5_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_self->_last_exc_arg != Py_None); + __pyx_t_9 = (__pyx_t_2 != 0); + __pyx_t_3 = __pyx_t_9; + __pyx_L5_bool_binop_done:; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1671 + * elif event == 'return' and self._last_exc_arg is not None: + * # For unhandled exceptions we actually track the return when at the topmost level. + * try: # <<<<<<<<<<<<<< + * py_db, t, additional_info = self._args[0:3] + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1672 + * # For unhandled exceptions we actually track the return when at the topmost level. + * try: + * py_db, t, additional_info = self._args[0:3] # <<<<<<<<<<<<<< + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + * if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1672, __pyx_L8_error) + } + __pyx_t_1 = __Pyx_PyTuple_GetSlice(__pyx_v_self->_args, 0, 3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1672, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_1); + if (1) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1672, __pyx_L8_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_5); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1672, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1672, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_5 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1672, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __pyx_v_py_db = __pyx_t_4; + __pyx_t_4 = 0; + __pyx_v_t = __pyx_t_7; + __pyx_t_7 = 0; + __pyx_v_additional_info = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1673 + * try: + * py_db, t, additional_info = self._args[0:3] + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. 
# <<<<<<<<<<<<<< + * if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_additional_info, __pyx_n_s_suspended_at_unhandled); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1673, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1673, __pyx_L8_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = ((!__pyx_t_3) != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1674 + * py_db, t, additional_info = self._args[0:3] + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + * if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): # <<<<<<<<<<<<<< + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) + * finally: + */ + __pyx_t_1 = __pyx_v_self->_raise_lines; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_5 = __pyx_f_14_pydevd_bundle_13pydevd_cython_is_unhandled_exception(((PyObject *)__pyx_v_self), __pyx_v_py_db, __pyx_v_frame, __pyx_v_self->_last_raise_line, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1674, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1674, __pyx_L8_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1675 + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + * if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) # <<<<<<<<<<<<<< + * finally: + * # Remove reference to exception after handling it. 
+ */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_stop_on_unhandled_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1675, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_py_db, __pyx_v_t, __pyx_v_additional_info, __pyx_v_self->_last_exc_arg}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 4+__pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1675, __pyx_L8_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_py_db, __pyx_v_t, __pyx_v_additional_info, __pyx_v_self->_last_exc_arg}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 4+__pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1675, __pyx_L8_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_4 = PyTuple_New(4+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1675, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, __pyx_v_t); + __Pyx_INCREF(__pyx_v_additional_info); + __Pyx_GIVEREF(__pyx_v_additional_info); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_6, __pyx_v_additional_info); + __Pyx_INCREF(__pyx_v_self->_last_exc_arg); + __Pyx_GIVEREF(__pyx_v_self->_last_exc_arg); + PyTuple_SET_ITEM(__pyx_t_4, 3+__pyx_t_6, __pyx_v_self->_last_exc_arg); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1675, __pyx_L8_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1674 + * py_db, t, additional_info = self._args[0:3] + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + * if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): # <<<<<<<<<<<<<< + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) + * finally: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1673 + * try: + * py_db, t, additional_info = self._args[0:3] + * if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. # <<<<<<<<<<<<<< + * if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): + * py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) + */ + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1678 + * finally: + * # Remove reference to exception after handling it. 
+ * self._last_exc_arg = None # <<<<<<<<<<<<<< + * + * ret = self.trace_dispatch_and_unhandled_exceptions + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_exc_arg); + __Pyx_DECREF(__pyx_v_self->_last_exc_arg); + __pyx_v_self->_last_exc_arg = Py_None; + goto __pyx_L9; + } + __pyx_L8_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0)) __Pyx_ErrFetch(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_6 = __pyx_lineno; __pyx_t_10 = __pyx_clineno; __pyx_t_11 = __pyx_filename; + { + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_exc_arg); + __Pyx_DECREF(__pyx_v_self->_last_exc_arg); + __pyx_v_self->_last_exc_arg = Py_None; + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_ErrRestore(__pyx_t_12, __pyx_t_13, __pyx_t_14); + __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_6; __pyx_clineno = __pyx_t_10; __pyx_filename = __pyx_t_11; + goto __pyx_L1_error; + } + __pyx_L9:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1669 + * self._last_raise_line = frame.f_lineno + * + * elif event == 'return' and self._last_exc_arg is not None: # <<<<<<<<<<<<<< + * # For unhandled exceptions we actually track the return when at the topmost level. + * try: + */ + } + __pyx_L4:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1680 + * self._last_exc_arg = None + * + * ret = self.trace_dispatch_and_unhandled_exceptions # <<<<<<<<<<<<<< + * + * # Need to reset (the call to _frame_trace_dispatch may have changed it). + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch_and_unhandled_exc); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1680, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_ret = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1684 + * # Need to reset (the call to _frame_trace_dispatch may have changed it). 
+ * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * frame.f_trace = SafeCallWrapper(ret) # <<<<<<<<<<<<<< + * # ELSE + * # frame.f_trace = ret + */ + __pyx_t_5 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_v_ret); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1684, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_5) < 0) __PYX_ERR(0, 1684, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1688 + * # frame.f_trace = ret + * # ENDIF + * return ret # <<<<<<<<<<<<<< + * + * def get_trace_dispatch_func(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_ret); + __pyx_r = __pyx_v_ret; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1657 + * # ENDIF + * + * def trace_dispatch_and_unhandled_exceptions(self, frame, event, arg): # <<<<<<<<<<<<<< + * # DEBUG = 'code_to_debug' in frame.f_code.co_filename + * # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.trace_dispatch_and_unhandled_exceptions", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_frame_trace_dispatch); + __Pyx_XDECREF(__pyx_v_py_db); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1690 + * return ret + * + * def get_trace_dispatch_func(self): # <<<<<<<<<<<<<< + * return self.trace_dispatch_and_unhandled_exceptions + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5get_trace_dispatch_func(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5get_trace_dispatch_func(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_trace_dispatch_func (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_4get_trace_dispatch_func(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_4get_trace_dispatch_func(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_trace_dispatch_func", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1691 + * + * def get_trace_dispatch_func(self): + * return self.trace_dispatch_and_unhandled_exceptions # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = 
__Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch_and_unhandled_exc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1691, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1690 + * return ret + * + * def get_trace_dispatch_func(self): # <<<<<<<<<<<<<< + * return self.trace_dispatch_and_unhandled_exceptions + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.get_trace_dispatch_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1621 + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class TopLevelThreadTracerNoBackFrame: + * cdef public object _frame_trace_dispatch; # <<<<<<<<<<<<<< + * cdef public tuple _args; + * cdef public object try_except_infos; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_frame_trace_dispatch); + __pyx_r = __pyx_v_self->_frame_trace_dispatch; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->_frame_trace_dispatch); + __Pyx_DECREF(__pyx_v_self->_frame_trace_dispatch); + __pyx_v_self->_frame_trace_dispatch = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_frame_trace_dispatch); + __Pyx_DECREF(__pyx_v_self->_frame_trace_dispatch); + __pyx_v_self->_frame_trace_dispatch = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1622 + * cdef class TopLevelThreadTracerNoBackFrame: + * cdef public object _frame_trace_dispatch; + * cdef public tuple _args; # <<<<<<<<<<<<<< + * cdef public object try_except_infos; + * cdef public object _last_exc_arg; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_args); + __pyx_r = __pyx_v_self->_args; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_3__set__(PyObject *__pyx_v_self, PyObject 
*__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 1622, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame._args.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1623 + * cdef public object _frame_trace_dispatch; + * cdef public tuple _args; + * cdef public object try_except_infos; # <<<<<<<<<<<<<< + * cdef public object _last_exc_arg; + * cdef public set _raise_lines; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->try_except_infos); + __pyx_r = __pyx_v_self->try_except_infos; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->try_except_infos); + __Pyx_DECREF(__pyx_v_self->try_except_infos); + __pyx_v_self->try_except_infos = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->try_except_infos); + __Pyx_DECREF(__pyx_v_self->try_except_infos); + 
__pyx_v_self->try_except_infos = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1624 + * cdef public tuple _args; + * cdef public object try_except_infos; + * cdef public object _last_exc_arg; # <<<<<<<<<<<<<< + * cdef public set _raise_lines; + * cdef public int _last_raise_line; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_last_exc_arg); + __pyx_r = __pyx_v_self->_last_exc_arg; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->_last_exc_arg); + __Pyx_DECREF(__pyx_v_self->_last_exc_arg); + __pyx_v_self->_last_exc_arg = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_exc_arg); + __Pyx_DECREF(__pyx_v_self->_last_exc_arg); + __pyx_v_self->_last_exc_arg = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1625 + * cdef public object try_except_infos; + * cdef public object _last_exc_arg; + * cdef public set _raise_lines; # <<<<<<<<<<<<<< + * cdef public int _last_raise_line; + * def __init__(self, frame_trace_dispatch, tuple args): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_raise_lines); + __pyx_r = __pyx_v_self->_raise_lines; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 
0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PySet_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 1625, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_raise_lines); + __Pyx_DECREF(__pyx_v_self->_raise_lines); + __pyx_v_self->_raise_lines = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame._raise_lines.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_raise_lines); + __Pyx_DECREF(__pyx_v_self->_raise_lines); + __pyx_v_self->_raise_lines = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1626 + * cdef public object _last_exc_arg; + * cdef public set _raise_lines; + * cdef public int _last_raise_line; # <<<<<<<<<<<<<< + * def __init__(self, frame_trace_dispatch, tuple args): + * self._frame_trace_dispatch = frame_trace_dispatch + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int 
__pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->_last_raise_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1626, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame._last_raise_line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1626, __pyx_L1_error) + __pyx_v_self->_last_raise_line = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame._last_raise_line.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_6__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_6__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame 
*__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._args, self._frame_trace_dispatch, self._last_exc_arg, self._last_raise_line, self._raise_lines, self.try_except_infos) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->_last_raise_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(6); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_self->_frame_trace_dispatch); + __Pyx_GIVEREF(__pyx_v_self->_frame_trace_dispatch); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_self->_frame_trace_dispatch); + __Pyx_INCREF(__pyx_v_self->_last_exc_arg); + __Pyx_GIVEREF(__pyx_v_self->_last_exc_arg); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_self->_last_exc_arg); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_t_1); + __Pyx_INCREF(__pyx_v_self->_raise_lines); + __Pyx_GIVEREF(__pyx_v_self->_raise_lines); + PyTuple_SET_ITEM(__pyx_t_2, 4, __pyx_v_self->_raise_lines); + __Pyx_INCREF(__pyx_v_self->try_except_infos); + __Pyx_GIVEREF(__pyx_v_self->try_except_infos); + PyTuple_SET_ITEM(__pyx_t_2, 5, __pyx_v_self->try_except_infos); + __pyx_t_1 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._args, self._frame_trace_dispatch, self._last_exc_arg, self._last_raise_line, self._raise_lines, self.try_except_infos) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v__dict = __pyx_t_2; + __pyx_t_2 = 0; + + /* "(tree fragment)":7 + * state = (self._args, self._frame_trace_dispatch, self._last_exc_arg, self._last_raise_line, self._raise_lines, self.try_except_infos) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_3 = (__pyx_v__dict != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); + __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "(tree fragment)":9 + 
* if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._args is not None or self._frame_trace_dispatch is not None or self._last_exc_arg is not None or self._raise_lines is not None or self.try_except_infos is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._args, self._frame_trace_dispatch, self._last_exc_arg, self._last_raise_line, self._raise_lines, self.try_except_infos) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._args is not None or self._frame_trace_dispatch is not None or self._last_exc_arg is not None or self._raise_lines is not None or self.try_except_infos is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->_args != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->_frame_trace_dispatch != Py_None); + __pyx_t_3 = (__pyx_t_5 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_self->_last_exc_arg != Py_None); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->_raise_lines != ((PyObject*)Py_None)); + __pyx_t_3 = (__pyx_t_5 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_self->try_except_infos != Py_None); + __pyx_t_5 = (__pyx_t_3 != 0); + __pyx_t_4 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_4; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None or self._frame_trace_dispatch is not None or self._last_exc_arg is not None or self._raise_lines is not None or self.try_except_infos is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, None), state + * else: + */ + __pyx_t_4 = (__pyx_v_use_setstate != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":13 + * use_setstate = self._args is not None or self._frame_trace_dispatch is not None or self._last_exc_arg is not None or self._raise_lines is not None or self.try_except_infos is not None + * if use_setstate: + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle_TopLevelThreadTra_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_171613889); + __Pyx_GIVEREF(__pyx_int_171613889); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_171613889); + 
__Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None or self._frame_trace_dispatch is not None or self._last_exc_arg is not None or self._raise_lines is not None or self.try_except_infos is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, None), state + * else: + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_TopLevelThreadTra_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_171613889); + __Pyx_GIVEREF(__pyx_int_171613889); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_171613889); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); + __pyx_t_6 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject 
*__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_8__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_8__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_TopLevelThreadTracerNoBackFrame, (type(self), 0xa3a9ec1, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1697 + * cdef class ThreadTracer: + * cdef public tuple _args; + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args + * # ELSE + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_args,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = 
PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_args)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 1697, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_args = ((PyObject*)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1697, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_args), (&PyTuple_Type), 1, "args", 1))) __PYX_ERR(0, 1697, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_args) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1698 + * cdef public tuple _args; + * def __init__(self, tuple args): + * self._args = args # <<<<<<<<<<<<<< + * # ELSE + * # class ThreadTracer(object): + */ + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = __pyx_v_args; + + /* "_pydevd_bundle/pydevd_cython.pyx":1697 + * cdef class ThreadTracer: + * cdef public tuple _args; + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args + * # ELSE + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1706 + * # ENDIF + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * ''' This is the callback used when we enter some context in the debugger. 
+ * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__[] = " This is the callback used when we enter some context in the debugger.\n\n We also decorate the thread we are in with info about the debugging.\n The attributes added are:\n pydev_state\n pydev_step_stop\n pydev_step_cmd\n pydev_notify_kill\n\n :param PyDB py_db:\n This is the global debugger (this method should actually be added as a method to it).\n "; +#if CYTHON_UPDATE_DESCRIPTOR_DOC +struct wrapperbase __pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__; +#endif +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 3, 3, 1); __PYX_ERR(0, 1706, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 3, 3, 2); __PYX_ERR(0, 1706, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__call__") < 0)) __PYX_ERR(0, 1706, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = values[1]; + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__call__", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1706, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), __pyx_v_frame, 
__pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + int __pyx_v_pydev_step_cmd; + PyObject *__pyx_v_frame_cache_key = 0; + PyObject *__pyx_v_cache_skips = 0; + int __pyx_v_is_stepping; + PyObject *__pyx_v_abs_path_canonical_path_and_base = 0; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_additional_info = 0; + PyObject *__pyx_v_py_db = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_frame_skips_cache = NULL; + PyObject *__pyx_v_back_frame = NULL; + PyObject *__pyx_v_back_frame_cache_key = NULL; + PyObject *__pyx_v_file_type = NULL; + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + char const *__pyx_t_18; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__call__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1732 + * # DEBUG = 'code_to_debug' in frame.f_code.co_filename + * # if DEBUG: print('ENTER: trace_dispatch: %s %s %s %s' % (frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)) + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args # <<<<<<<<<<<<<< + * if additional_info.is_tracing: + * return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + */ + __pyx_t_1 = __pyx_v_self->_args; + __Pyx_INCREF(__pyx_t_1); + if (likely(__pyx_t_1 != Py_None)) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 5)) { + if (size > 5) __Pyx_RaiseTooManyValuesError(5); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1732, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 2); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 3); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 4); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + #else + { + Py_ssize_t i; + PyObject** temps[5] = {&__pyx_t_2,&__pyx_t_3,&__pyx_t_4,&__pyx_t_5,&__pyx_t_6}; + for (i=0; i < 5; i++) { + PyObject* item = PySequence_ITEM(sequence, i); if (unlikely(!item)) __PYX_ERR(0, 1732, __pyx_L1_error) + __Pyx_GOTREF(item); + *(temps[i]) = item; + } + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(0, 1732, __pyx_L1_error) + } + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 1732, __pyx_L1_error) + if (!(likely(PyDict_CheckExact(__pyx_t_5))||((__pyx_t_5) == 
Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 1732, __pyx_L1_error) + __pyx_v_py_db = __pyx_t_2; + __pyx_t_2 = 0; + __pyx_v_t = __pyx_t_3; + __pyx_t_3 = 0; + __pyx_v_additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_4); + __pyx_t_4 = 0; + __pyx_v_cache_skips = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + __pyx_v_frame_skips_cache = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1733 + * # if DEBUG: print('ENTER: trace_dispatch: %s %s %s %s' % (frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)) + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + * if additional_info.is_tracing: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + */ + __pyx_t_7 = (__pyx_v_additional_info->is_tracing != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1734 + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + * if additional_info.is_tracing: + * return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch # <<<<<<<<<<<<<< + * + * additional_info.is_tracing += 1 + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1734, __pyx_L1_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1734, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1733 + * # if DEBUG: print('ENTER: trace_dispatch: %s %s %s %s' % (frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)) + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + * if additional_info.is_tracing: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1736 + * return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + * additional_info.is_tracing += 1 # <<<<<<<<<<<<<< + * try: + * pydev_step_cmd = additional_info.pydev_step_cmd + */ + __pyx_v_additional_info->is_tracing = (__pyx_v_additional_info->is_tracing + 1); + + /* "_pydevd_bundle/pydevd_cython.pyx":1737 + * + * additional_info.is_tracing += 1 + * try: # <<<<<<<<<<<<<< + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 + */ + /*try:*/ { + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_8, &__pyx_t_9, &__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_10); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1738 + * additional_info.is_tracing += 1 + * try: + * pydev_step_cmd = additional_info.pydev_step_cmd # <<<<<<<<<<<<<< + * is_stepping = pydev_step_cmd != -1 + * if py_db.pydb_disposed: + */ + __pyx_t_11 = __pyx_v_additional_info->pydev_step_cmd; + __pyx_v_pydev_step_cmd = __pyx_t_11; + + /* "_pydevd_bundle/pydevd_cython.pyx":1739 + * try: + * pydev_step_cmd = additional_info.pydev_step_cmd + * 
is_stepping = pydev_step_cmd != -1 # <<<<<<<<<<<<<< + * if py_db.pydb_disposed: + * return None if event == 'call' else NO_FTRACE + */ + __pyx_v_is_stepping = (__pyx_v_pydev_step_cmd != -1L); + + /* "_pydevd_bundle/pydevd_cython.pyx":1740 + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 + * if py_db.pydb_disposed: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_pydb_disposed); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1740, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1740, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1741 + * is_stepping = pydev_step_cmd != -1 + * if py_db.pydb_disposed: + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * # if thread is not alive, cancel trace_dispatch processing + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1741, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1741, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1740 + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 + * if py_db.pydb_disposed: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1744 + * + * # if thread is not alive, cancel trace_dispatch processing + * if not is_thread_alive(t): # <<<<<<<<<<<<<< + * py_db.notify_thread_not_alive(get_current_thread_id(t)) + * return None if event == 'call' else NO_FTRACE + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_is_thread_alive); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1744, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_1 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_5, __pyx_v_t) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_v_t); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1744, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1744, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = ((!__pyx_t_7) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1745 + * # if thread is not alive, cancel trace_dispatch processing + * if not is_thread_alive(t): + * py_db.notify_thread_not_alive(get_current_thread_id(t)) # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_notify_thread_not_alive); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1745, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_get_current_thread_id); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1745, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_5 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_3, __pyx_v_t) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_t); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1745, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1745, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1746 + * if not is_thread_alive(t): + * py_db.notify_thread_not_alive(get_current_thread_id(t)) + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * # Note: it's important that the context name is also given because we may hit something once + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1746, __pyx_L7_error) + if (__pyx_t_12) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1746, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1744 + * + * # if thread is not alive, cancel trace_dispatch processing + * if not is_thread_alive(t): # <<<<<<<<<<<<<< + * py_db.notify_thread_not_alive(get_current_thread_id(t)) + * return None if event == 'call' else NO_FTRACE + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1750 + * # Note: it's important that the context name is also given because we may hit something once + * # in the global context and another in the local context. + * frame_cache_key = frame.f_code # <<<<<<<<<<<<<< + * if frame_cache_key in cache_skips: + * if not is_stepping: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1750, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_frame_cache_key = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1751 + * # in the global context and another in the local context. + * frame_cache_key = frame.f_code + * if frame_cache_key in cache_skips: # <<<<<<<<<<<<<< + * if not is_stepping: + * # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 1751, __pyx_L7_error) + } + __pyx_t_12 = (__Pyx_PyDict_ContainsTF(__pyx_v_frame_cache_key, __pyx_v_cache_skips, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1751, __pyx_L7_error) + __pyx_t_7 = (__pyx_t_12 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1752 + * frame_cache_key = frame.f_code + * if frame_cache_key in cache_skips: + * if not is_stepping: # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE + */ + __pyx_t_7 = ((!(__pyx_v_is_stepping != 0)) != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1754 + * if not is_stepping: + * # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * else: + * # When stepping we can't take into account caching based on the breakpoints (only global filtering). 
+ */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1754, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1754, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1752 + * frame_cache_key = frame.f_code + * if frame_cache_key in cache_skips: + * if not is_stepping: # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1757 + * else: + * # When stepping we can't take into account caching based on the breakpoints (only global filtering). + * if cache_skips.get(frame_cache_key) == 1: # <<<<<<<<<<<<<< + * + * if additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + */ + /*else*/ { + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 1757, __pyx_L7_error) + } + __pyx_t_1 = __Pyx_PyDict_GetItemDefault(__pyx_v_cache_skips, __pyx_v_frame_cache_key, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1757, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyInt_EqObjC(__pyx_t_1, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1757, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1757, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1759 + * if cache_skips.get(frame_cache_key) == 1: + * + * if additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: # <<<<<<<<<<<<<< + * notify_skipped_step_in_because_of_filters(py_db, frame) + * + */ + switch (__pyx_v_additional_info->pydev_original_step_cmd) { + case 0x6B: + case 0x90: + __pyx_t_12 = 1; + break; + default: + __pyx_t_12 = 0; + break; + } + __pyx_t_13 = (__pyx_t_12 != 0); + if (__pyx_t_13) { + } else { + __pyx_t_7 = __pyx_t_13; + goto __pyx_L19_bool_binop_done; + } + __pyx_t_13 = __Pyx_PyObject_IsTrue(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in); if (unlikely(__pyx_t_13 < 0)) __PYX_ERR(0, 1759, __pyx_L7_error) + __pyx_t_12 = ((!__pyx_t_13) != 0); + __pyx_t_7 = __pyx_t_12; + __pyx_L19_bool_binop_done:; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1760 + * + * if additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + * notify_skipped_step_in_because_of_filters(py_db, frame) # <<<<<<<<<<<<<< + * + * back_frame = frame.f_back + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_notify_skipped_step_in_because_o); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1760, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + 
__Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_py_db, __pyx_v_frame}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1760, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_py_db, __pyx_v_frame}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1760, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + { + __pyx_t_4 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1760, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_11, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_11, __pyx_v_frame); + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1760, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1759 + * if cache_skips.get(frame_cache_key) == 1: + * + * if additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: # <<<<<<<<<<<<<< + * notify_skipped_step_in_because_of_filters(py_db, frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1762 + * notify_skipped_step_in_because_of_filters(py_db, frame) + * + * back_frame = frame.f_back # <<<<<<<<<<<<<< + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * back_frame_cache_key = back_frame.f_code + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1762, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_v_back_frame = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1763 + * + * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): # <<<<<<<<<<<<<< + * back_frame_cache_key = back_frame.f_code + * if cache_skips.get(back_frame_cache_key) == 1: + */ + __pyx_t_12 = (__pyx_v_back_frame != Py_None); + __pyx_t_13 = (__pyx_t_12 != 0); + if (__pyx_t_13) { + } else { + __pyx_t_7 = __pyx_t_13; + goto __pyx_L22_bool_binop_done; + } + switch (__pyx_v_pydev_step_cmd) { + case 0x6B: + case 0x90: + case 0x6D: + case 0xA0: + __pyx_t_13 = 1; + break; + default: + __pyx_t_13 = 0; + break; + } + __pyx_t_12 = (__pyx_t_13 != 0); + __pyx_t_7 = __pyx_t_12; + __pyx_L22_bool_binop_done:; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1764 + * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * back_frame_cache_key = back_frame.f_code # <<<<<<<<<<<<<< + * if cache_skips.get(back_frame_cache_key) == 1: + * # if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_back_frame, 
__pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1764, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_v_back_frame_cache_key = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1765 + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * back_frame_cache_key = back_frame.f_code + * if cache_skips.get(back_frame_cache_key) == 1: # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 1765, __pyx_L7_error) + } + __pyx_t_6 = __Pyx_PyDict_GetItemDefault(__pyx_v_cache_skips, __pyx_v_back_frame_cache_key, Py_None); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1765, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_t_6, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1765, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1765, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1767 + * if cache_skips.get(back_frame_cache_key) == 1: + * # if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * else: + * # if DEBUG: print('skipped: trace_dispatch (cache hit: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1767, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1767, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1765 + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * back_frame_cache_key = back_frame.f_code + * if cache_skips.get(back_frame_cache_key) == 1: # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1763 + * + * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): # <<<<<<<<<<<<<< + * back_frame_cache_key = back_frame.f_code + * if cache_skips.get(back_frame_cache_key) == 1: + */ + goto __pyx_L21; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1770 + * else: + * # if DEBUG: print('skipped: trace_dispatch (cache hit: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * try: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1770, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_6, 
__pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1770, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + } + __pyx_L21:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1757 + * else: + * # When stepping we can't take into account caching based on the breakpoints (only global filtering). + * if cache_skips.get(frame_cache_key) == 1: # <<<<<<<<<<<<<< + * + * if additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + */ + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1751 + * # in the global context and another in the local context. + * frame_cache_key = frame.f_code + * if frame_cache_key in cache_skips: # <<<<<<<<<<<<<< + * if not is_stepping: + * # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1772 + * return None if event == 'call' else NO_FTRACE + * + * try: # <<<<<<<<<<<<<< + * # Make fast path faster! + * abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1774 + * try: + * # Make fast path faster! + * abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] # <<<<<<<<<<<<<< + * except: + * abs_path_canonical_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1774, __pyx_L25_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1774, __pyx_L25_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1774, __pyx_L25_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_PyObject_GetItem(__pyx_t_1, __pyx_t_4); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1774, __pyx_L25_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!(likely(PyTuple_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 1774, __pyx_L25_error) + __pyx_v_abs_path_canonical_path_and_base = ((PyObject*)__pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1772 + * return None if event == 'call' else NO_FTRACE + * + * try: # <<<<<<<<<<<<<< + * # Make fast path faster! 
+ * abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + } + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + goto __pyx_L30_try_end; + __pyx_L25_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1775 + * # Make fast path faster! + * abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: # <<<<<<<<<<<<<< + * abs_path_canonical_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_4, &__pyx_t_1) < 0) __PYX_ERR(0, 1775, __pyx_L27_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":1776 + * abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: + * abs_path_canonical_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) # <<<<<<<<<<<<<< + * + * file_type = py_db.get_file_type(frame, abs_path_canonical_path_and_base) # we don't want to debug threading or anything related to pydevd + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1776, __pyx_L27_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_5 = (__pyx_t_2) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_2, __pyx_v_frame) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_frame); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1776, __pyx_L27_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyTuple_CheckExact(__pyx_t_5))||((__pyx_t_5) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 1776, __pyx_L27_except_error) + __Pyx_XDECREF_SET(__pyx_v_abs_path_canonical_path_and_base, ((PyObject*)__pyx_t_5)); + __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L26_exception_handled; + } + __pyx_L27_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1772 + * return None if event == 'call' else NO_FTRACE + * + * try: # <<<<<<<<<<<<<< + * # Make fast path faster! 
+ * abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + goto __pyx_L7_error; + __pyx_L26_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + __pyx_L30_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1778 + * abs_path_canonical_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + * + * file_type = py_db.get_file_type(frame, abs_path_canonical_path_and_base) # we don't want to debug threading or anything related to pydevd # <<<<<<<<<<<<<< + * + * if file_type is not None: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_get_file_type); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1778, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_frame, __pyx_v_abs_path_canonical_path_and_base}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1778, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_frame, __pyx_v_abs_path_canonical_path_and_base}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1778, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1778, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_11, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_abs_path_canonical_path_and_base); + __Pyx_GIVEREF(__pyx_v_abs_path_canonical_path_and_base); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_11, __pyx_v_abs_path_canonical_path_and_base); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1778, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_file_type = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1780 + * file_type = py_db.get_file_type(frame, abs_path_canonical_path_and_base) # we don't want to debug threading or anything related to pydevd + * + * if file_type is not None: # <<<<<<<<<<<<<< + * if file_type == 1: # inlining LIB_FILE = 1 + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + */ + __pyx_t_7 = (__pyx_v_file_type != Py_None); + __pyx_t_12 = (__pyx_t_7 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1781 + * + * if file_type 
is not None: + * if file_type == 1: # inlining LIB_FILE = 1 # <<<<<<<<<<<<<< + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + * # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_file_type, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1781, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1781, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1782 + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[frame_cache_key] = 1 + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_in_project_scope); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + if (unlikely(__pyx_v_abs_path_canonical_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1782, __pyx_L7_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_path_canonical_path_and_base, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_frame, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_frame, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_11, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_11, __pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + 
__pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1782, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_7 = ((!__pyx_t_12) != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1784 + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + * # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[frame_cache_key] = 1 # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * else: + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1784, __pyx_L7_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_frame_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1784, __pyx_L7_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1785 + * # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[frame_cache_key] = 1 + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * else: + * # if DEBUG: print('skipped: trace_dispatch', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1785, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1785, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1782 + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[frame_cache_key] = 1 + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1781 + * + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 # <<<<<<<<<<<<<< + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + * # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + goto __pyx_L34; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1788 + * else: + * # if DEBUG: print('skipped: trace_dispatch', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[frame_cache_key] = 1 # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + /*else*/ { + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1788, __pyx_L7_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_frame_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1788, __pyx_L7_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1789 + * # if DEBUG: print('skipped: trace_dispatch', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + * 
cache_skips[frame_cache_key] = 1 + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * if py_db.is_files_filter_enabled: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1789, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1789, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + } + __pyx_L34:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1780 + * file_type = py_db.get_file_type(frame, abs_path_canonical_path_and_base) # we don't want to debug threading or anything related to pydevd + * + * if file_type is not None: # <<<<<<<<<<<<<< + * if file_type == 1: # inlining LIB_FILE = 1 + * if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1791 + * return None if event == 'call' else NO_FTRACE + * + * if py_db.is_files_filter_enabled: # <<<<<<<<<<<<<< + * if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): + * cache_skips[frame_cache_key] = 1 + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_is_files_filter_enabled); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1791, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1791, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1792 + * + * if py_db.is_files_filter_enabled: + * if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): # <<<<<<<<<<<<<< + * cache_skips[frame_cache_key] = 1 + * + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1792, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + if (unlikely(__pyx_v_abs_path_canonical_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1792, __pyx_L7_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_path_canonical_path_and_base, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1792, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_t_3, Py_False}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1792, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_t_3, Py_False}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 
1792, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1792, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_11, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_11, __pyx_t_3); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_11, Py_False); + __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1792, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1792, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1793 + * if py_db.is_files_filter_enabled: + * if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): + * cache_skips[frame_cache_key] = 1 # <<<<<<<<<<<<<< + * + * if is_stepping and additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1793, __pyx_L7_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_frame_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1793, __pyx_L7_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1795 + * cache_skips[frame_cache_key] = 1 + * + * if is_stepping and additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: # <<<<<<<<<<<<<< + * notify_skipped_step_in_because_of_filters(py_db, frame) + * + */ + __pyx_t_12 = (__pyx_v_is_stepping != 0); + if (__pyx_t_12) { + } else { + __pyx_t_7 = __pyx_t_12; + goto __pyx_L39_bool_binop_done; + } + switch (__pyx_v_additional_info->pydev_original_step_cmd) { + case 0x6B: + case 0x90: + __pyx_t_12 = 1; + break; + default: + __pyx_t_12 = 0; + break; + } + __pyx_t_13 = (__pyx_t_12 != 0); + if (__pyx_t_13) { + } else { + __pyx_t_7 = __pyx_t_13; + goto __pyx_L39_bool_binop_done; + } + __pyx_t_13 = __Pyx_PyObject_IsTrue(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in); if (unlikely(__pyx_t_13 < 0)) __PYX_ERR(0, 1795, __pyx_L7_error) + __pyx_t_12 = ((!__pyx_t_13) != 0); + __pyx_t_7 = __pyx_t_12; + __pyx_L39_bool_binop_done:; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1796 + * + * if is_stepping and additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + * notify_skipped_step_in_because_of_filters(py_db, frame) # <<<<<<<<<<<<<< + * + * # A little gotcha, sometimes when we're stepping in we have to stop in a + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_notify_skipped_step_in_because_o); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1796, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + 
__Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_py_db, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1796, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_py_db, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1796, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1796, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_11, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_11, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1796, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1795 + * cache_skips[frame_cache_key] = 1 + * + * if is_stepping and additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: # <<<<<<<<<<<<<< + * notify_skipped_step_in_because_of_filters(py_db, frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1801 + * # return event showing the back frame as the current frame, so, we need + * # to check not only the current frame but the back frame too. + * back_frame = frame.f_back # <<<<<<<<<<<<<< + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1801, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XDECREF_SET(__pyx_v_back_frame, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1802 + * # to check not only the current frame but the back frame too. 
+ * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): # <<<<<<<<<<<<<< + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + * back_frame_cache_key = back_frame.f_code + */ + __pyx_t_12 = (__pyx_v_back_frame != Py_None); + __pyx_t_13 = (__pyx_t_12 != 0); + if (__pyx_t_13) { + } else { + __pyx_t_7 = __pyx_t_13; + goto __pyx_L43_bool_binop_done; + } + switch (__pyx_v_pydev_step_cmd) { + case 0x6B: + case 0x90: + case 0x6D: + case 0xA0: + __pyx_t_13 = 1; + break; + default: + __pyx_t_13 = 0; + break; + } + __pyx_t_12 = (__pyx_t_13 != 0); + __pyx_t_7 = __pyx_t_12; + __pyx_L43_bool_binop_done:; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1803 + * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): # <<<<<<<<<<<<<< + * back_frame_cache_key = back_frame.f_code + * cache_skips[back_frame_cache_key] = 1 + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_apply_files_filter); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_back_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_back_frame, __pyx_t_6, Py_False}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_back_frame, __pyx_t_6, Py_False}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_back_frame); + __Pyx_GIVEREF(__pyx_v_back_frame); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_11, __pyx_v_back_frame); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_11, __pyx_t_6); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_11, Py_False); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + 
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1803, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1804 + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + * back_frame_cache_key = back_frame.f_code # <<<<<<<<<<<<<< + * cache_skips[back_frame_cache_key] = 1 + * # if DEBUG: print('skipped: trace_dispatch (filtered out: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_back_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1804, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XDECREF_SET(__pyx_v_back_frame_cache_key, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1805 + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + * back_frame_cache_key = back_frame.f_code + * cache_skips[back_frame_cache_key] = 1 # <<<<<<<<<<<<<< + * # if DEBUG: print('skipped: trace_dispatch (filtered out: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1805, __pyx_L7_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_back_frame_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1805, __pyx_L7_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1807 + * cache_skips[back_frame_cache_key] = 1 + * # if DEBUG: print('skipped: trace_dispatch (filtered out: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * else: + * # if DEBUG: print('skipped: trace_dispatch (filtered out: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1807, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1807, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1803 + * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): # <<<<<<<<<<<<<< + * back_frame_cache_key = back_frame.f_code + * cache_skips[back_frame_cache_key] = 1 + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1802 + * # to check not only the current frame but the back frame too. 
+ * back_frame = frame.f_back + * if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): # <<<<<<<<<<<<<< + * if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + * back_frame_cache_key = back_frame.f_code + */ + goto __pyx_L42; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1810 + * else: + * # if DEBUG: print('skipped: trace_dispatch (filtered out: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * # if DEBUG: print('trace_dispatch', filename, frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1810, __pyx_L7_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1810, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + } + __pyx_L42:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1792 + * + * if py_db.is_files_filter_enabled: + * if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): # <<<<<<<<<<<<<< + * cache_skips[frame_cache_key] = 1 + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1791 + * return None if event == 'call' else NO_FTRACE + * + * if py_db.is_files_filter_enabled: # <<<<<<<<<<<<<< + * if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): + * cache_skips[frame_cache_key] = 1 + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1818 + * ret = PyDBFrame( + * ( + * py_db, abs_path_canonical_path_and_base, additional_info, t, frame_skips_cache, frame_cache_key, # <<<<<<<<<<<<<< + * ) + * ).trace_dispatch(frame, event, arg) + */ + __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1818, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_abs_path_canonical_path_and_base); + __Pyx_GIVEREF(__pyx_v_abs_path_canonical_path_and_base); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_abs_path_canonical_path_and_base); + __Pyx_INCREF(((PyObject *)__pyx_v_additional_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_additional_info)); + PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_additional_info)); + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_v_t); + __Pyx_INCREF(__pyx_v_frame_skips_cache); + __Pyx_GIVEREF(__pyx_v_frame_skips_cache); + PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_frame_skips_cache); + __Pyx_INCREF(__pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_v_frame_cache_key); + PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_v_frame_cache_key); + + /* "_pydevd_bundle/pydevd_cython.pyx":1816 + * # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + * # reference to the frame). 
+ * ret = PyDBFrame( # <<<<<<<<<<<<<< + * ( + * py_db, abs_path_canonical_path_and_base, additional_info, t, frame_skips_cache, frame_cache_key, + */ + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame), __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1816, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1820 + * py_db, abs_path_canonical_path_and_base, additional_info, t, frame_skips_cache, frame_cache_key, + * ) + * ).trace_dispatch(frame, event, arg) # <<<<<<<<<<<<<< + * if ret is None: + * # 1 means skipped because of filters. + */ + if (!(likely(PyString_CheckExact(__pyx_v_event))||((__pyx_v_event) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_event)->tp_name), 0))) __PYX_ERR(0, 1820, __pyx_L7_error) + __pyx_t_1 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_t_4)->__pyx_vtab)->trace_dispatch(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_t_4), __pyx_v_frame, ((PyObject*)__pyx_v_event), __pyx_v_arg, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1820, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_ret = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1821 + * ) + * ).trace_dispatch(frame, event, arg) + * if ret is None: # <<<<<<<<<<<<<< + * # 1 means skipped because of filters. + * # 2 means skipped because no breakpoints were hit. + */ + __pyx_t_7 = (__pyx_v_ret == Py_None); + __pyx_t_12 = (__pyx_t_7 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1824 + * # 1 means skipped because of filters. + * # 2 means skipped because no breakpoints were hit. + * cache_skips[frame_cache_key] = 2 # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1824, __pyx_L7_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_frame_cache_key, __pyx_int_2) < 0)) __PYX_ERR(0, 1824, __pyx_L7_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1825 + * # 2 means skipped because no breakpoints were hit. + * cache_skips[frame_cache_key] = 2 + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1825, __pyx_L7_error) + if (__pyx_t_12) { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1825, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1821 + * ) + * ).trace_dispatch(frame, event, arg) + * if ret is None: # <<<<<<<<<<<<<< + * # 1 means skipped because of filters. + * # 2 means skipped because no breakpoints were hit. + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1828 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * frame.f_trace = SafeCallWrapper(ret) # Make sure we keep the returned tracer. 
# <<<<<<<<<<<<<< + * # ELSE + * # frame.f_trace = ret # Make sure we keep the returned tracer. + */ + __pyx_t_1 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_v_ret); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1828, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_1) < 0) __PYX_ERR(0, 1828, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1832 + * # frame.f_trace = ret # Make sure we keep the returned tracer. + * # ENDIF + * return ret # <<<<<<<<<<<<<< + * + * except SystemExit: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_ret); + __pyx_r = __pyx_v_ret; + goto __pyx_L11_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1737 + * + * additional_info.is_tracing += 1 + * try: # <<<<<<<<<<<<<< + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 + */ + } + __pyx_L7_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1834 + * return ret + * + * except SystemExit: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE + * + */ + __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_SystemExit); + if (__pyx_t_11) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_4, &__pyx_t_5) < 0) __PYX_ERR(0, 1834, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_5); + + /* "_pydevd_bundle/pydevd_cython.pyx":1835 + * + * except SystemExit: + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * + * except Exception: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1835, __pyx_L9_except_error) + if (__pyx_t_12) { + __Pyx_INCREF(Py_None); + __pyx_t_6 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1835, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L10_except_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1837 + * return None if event == 'call' else NO_FTRACE + * + * except Exception: # <<<<<<<<<<<<<< + * if py_db.pydb_disposed: + * return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. 
+ */ + __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_11) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_4, &__pyx_t_1) < 0) __PYX_ERR(0, 1837, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":1838 + * + * except Exception: + * if py_db.pydb_disposed: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. + * # Log it + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_pydb_disposed); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1838, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1838, __pyx_L9_except_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1839 + * except Exception: + * if py_db.pydb_disposed: + * return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. # <<<<<<<<<<<<<< + * # Log it + * try: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1839, __pyx_L9_except_error) + if (__pyx_t_12) { + __Pyx_INCREF(Py_None); + __pyx_t_6 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1839, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L10_except_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1838 + * + * except Exception: + * if py_db.pydb_disposed: # <<<<<<<<<<<<<< + * return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. + * # Log it + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1841 + * return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. + * # Log it + * try: # <<<<<<<<<<<<<< + * if pydev_log_exception is not None: + * # This can actually happen during the interpreter shutdown in Python 2.7 + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_16, &__pyx_t_15, &__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_14); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1842 + * # Log it + * try: + * if pydev_log_exception is not None: # <<<<<<<<<<<<<< + * # This can actually happen during the interpreter shutdown in Python 2.7 + * pydev_log_exception() + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pydev_log_exception); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1842, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_12 = (__pyx_t_6 != Py_None); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = (__pyx_t_12 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1844 + * if pydev_log_exception is not None: + * # This can actually happen during the interpreter shutdown in Python 2.7 + * pydev_log_exception() # <<<<<<<<<<<<<< + * except: + * # Error logging? We're really in the interpreter shutdown... 
+ */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pydev_log_exception); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1844, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_6 = (__pyx_t_2) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1844, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1842 + * # Log it + * try: + * if pydev_log_exception is not None: # <<<<<<<<<<<<<< + * # This can actually happen during the interpreter shutdown in Python 2.7 + * pydev_log_exception() + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1841 + * return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. + * # Log it + * try: # <<<<<<<<<<<<<< + * if pydev_log_exception is not None: + * # This can actually happen during the interpreter shutdown in Python 2.7 + */ + } + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + goto __pyx_L59_try_end; + __pyx_L52_error:; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1845 + * # This can actually happen during the interpreter shutdown in Python 2.7 + * pydev_log_exception() + * except: # <<<<<<<<<<<<<< + * # Error logging? We're really in the interpreter shutdown... 
+ * # (https://github.com/fabioz/PyDev.Debugger/issues/8) + */ + /*except:*/ { + __Pyx_ErrRestore(0,0,0); + goto __pyx_L53_exception_handled; + } + __pyx_L53_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_15, __pyx_t_14); + __pyx_L59_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1849 + * # (https://github.com/fabioz/PyDev.Debugger/issues/8) + * pass + * return None if event == 'call' else NO_FTRACE # <<<<<<<<<<<<<< + * finally: + * additional_info.is_tracing -= 1 + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 1849, __pyx_L9_except_error) + if (__pyx_t_7) { + __Pyx_INCREF(Py_None); + __pyx_t_6 = Py_None; + } else { + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1849, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L10_except_return; + } + goto __pyx_L9_except_error; + __pyx_L9_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1737 + * + * additional_info.is_tracing += 1 + * try: # <<<<<<<<<<<<<< + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 + */ + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + goto __pyx_L5_error; + __pyx_L11_try_return:; + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + goto __pyx_L4_return; + __pyx_L10_except_return:; + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + goto __pyx_L4_return; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1851 + * return None if event == 'call' else NO_FTRACE + * finally: + * additional_info.is_tracing -= 1 # <<<<<<<<<<<<<< + * + * + */ + /*finally:*/ { + __pyx_L5_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_10 = 0; __pyx_t_9 = 0; __pyx_t_8 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_9, &__pyx_t_8) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_9, &__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __pyx_t_11 = __pyx_lineno; __pyx_t_17 = __pyx_clineno; __pyx_t_18 = __pyx_filename; + { + __pyx_v_additional_info->is_tracing = (__pyx_v_additional_info->is_tracing - 1); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + } + __Pyx_XGIVEREF(__pyx_t_10); + 
__Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_ErrRestore(__pyx_t_10, __pyx_t_9, __pyx_t_8); + __pyx_t_10 = 0; __pyx_t_9 = 0; __pyx_t_8 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; + __pyx_lineno = __pyx_t_11; __pyx_clineno = __pyx_t_17; __pyx_filename = __pyx_t_18; + goto __pyx_L1_error; + } + __pyx_L4_return: { + __pyx_t_16 = __pyx_r; + __pyx_r = 0; + __pyx_v_additional_info->is_tracing = (__pyx_v_additional_info->is_tracing - 1); + __pyx_r = __pyx_t_16; + __pyx_t_16 = 0; + goto __pyx_L0; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1706 + * # ENDIF + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * ''' This is the callback used when we enter some context in the debugger. + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_frame_cache_key); + __Pyx_XDECREF(__pyx_v_cache_skips); + __Pyx_XDECREF(__pyx_v_abs_path_canonical_path_and_base); + __Pyx_XDECREF((PyObject *)__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_py_db); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_frame_skips_cache); + __Pyx_XDECREF(__pyx_v_back_frame); + __Pyx_XDECREF(__pyx_v_back_frame_cache_key); + __Pyx_XDECREF(__pyx_v_file_type); + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1696 + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class ThreadTracer: + * cdef public tuple _args; # <<<<<<<<<<<<<< + * def __init__(self, tuple args): + * self._args = args + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_args); + __pyx_r = __pyx_v_self->_args; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_2__set__(((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 1696, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer._args.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_4__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_4__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int 
__pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._args,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_args); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._args is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._args is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->_args != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self._args is not None + * if use_setstate: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + */ + __Pyx_XDECREF(__pyx_r); + 
__Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_ThreadTracer); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64458794); + __Pyx_GIVEREF(__pyx_int_64458794); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64458794); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_ThreadTracer); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64458794); + __Pyx_GIVEREF(__pyx_int_64458794); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64458794); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, 
__pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_6__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_6__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1866 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * constructed_tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_11__call__(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_11__call__ = {"__call__", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_11__call__, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_11__call__(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_self = 0; + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + 
PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[4] = {0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_self)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, 1); __PYX_ERR(0, 1866, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, 2); __PYX_ERR(0, 1866, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, 3); __PYX_ERR(0, 1866, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__call__") < 0)) __PYX_ERR(0, 1866, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + } + __pyx_v_self = values[0]; + __pyx_v_frame = values[1]; + __pyx_v_event = values[2]; + __pyx_v_arg = values[3]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1866, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_10__call__(__pyx_self, __pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_10__call__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__call__", 0); + + /* 
"_pydevd_bundle/pydevd_cython.pyx":1867 + * + * def __call__(self, frame, event, arg): + * constructed_tid_to_last_frame[self._args[1].ident] = frame # <<<<<<<<<<<<<< + * return _original_call(self, frame, event, arg) + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_constructed_tid_to_last_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1867, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_args_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1867, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_2, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1867, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_ident); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1867, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(PyObject_SetItem(__pyx_t_1, __pyx_t_2, __pyx_v_frame) < 0)) __PYX_ERR(0, 1867, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1868 + * def __call__(self, frame, event, arg): + * constructed_tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) # <<<<<<<<<<<<<< + * + * ThreadTracer.__call__ = __call__ + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_original_call); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1868, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_4 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_4, 4+__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1868, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_4, 4+__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1868, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_5 = PyTuple_New(4+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1868, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_self); + __Pyx_GIVEREF(__pyx_v_self); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_self); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_4, __pyx_v_arg); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_5, 
NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1868, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1866 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * constructed_tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_13__pyx_unpickle_PyDBAdditionalThreadInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_13__pyx_unpickle_PyDBAdditionalThreadInfo = {"__pyx_unpickle_PyDBAdditionalThreadInfo", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_13__pyx_unpickle_PyDBAdditionalThreadInfo, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_13__pyx_unpickle_PyDBAdditionalThreadInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBAdditionalThreadInfo (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBAdditionalThreadInfo", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBAdditionalThreadInfo", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, 
pos_args, "__pyx_unpickle_PyDBAdditionalThreadInfo") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBAdditionalThreadInfo", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBAdditionalThreadInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12__pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12__pyx_unpickle_PyDBAdditionalThreadInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBAdditionalThreadInfo", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x75b3b02, 0x5f02be1, 0xa5a0d63): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__9, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x75b3b02, 0x5f02be1, 0xa5a0d63): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums 
(0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + * __pyx_result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x75b3b02, 0x5f02be1, 0xa5a0d63): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x75b3b02, 0x5f02be1, 0xa5a0d63): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + * __pyx_result = PyDBAdditionalThreadInfo.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + * __pyx_result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + * __pyx_result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + * return 
__pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBAdditionalThreadInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; 
__pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBAdditionalThreadInfo__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[26]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, 
__pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v___pyx_result->conditional_breakpoint_exception); + __pyx_v___pyx_result->conditional_breakpoint_exception = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->is_tracing = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_call_from_jinja2); + __pyx_v___pyx_result->pydev_call_from_jinja2 = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_call_inside_jinja2); + __pyx_v___pyx_result->pydev_call_inside_jinja2 = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_django_resolve_frame = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_func_name); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_func_name); + __pyx_v___pyx_result->pydev_func_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not 
subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_message); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_message); + __pyx_v___pyx_result->pydev_message = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_next_line = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 8, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_notify_kill = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 9, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_original_step_cmd = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 10, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_smart_child_offset = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 11, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_smart_parent_offset = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 12, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_smart_step_into_variants); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_smart_step_into_variants); + __pyx_v___pyx_result->pydev_smart_step_into_variants = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 13, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_smart_step_stop); + __pyx_v___pyx_result->pydev_smart_step_stop = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 14, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_state = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 15, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_step_cmd = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 16, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->pydev_step_stop); + __Pyx_DECREF(__pyx_v___pyx_result->pydev_step_stop); + __pyx_v___pyx_result->pydev_step_stop = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 17, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->pydev_use_scoped_step_frame = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 18, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->step_in_initial_location); + __Pyx_DECREF(__pyx_v___pyx_result->step_in_initial_location); + __pyx_v___pyx_result->step_in_initial_location = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 19, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->suspend_type = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 20, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->suspended_at_unhandled = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 21, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->target_id_to_smart_step_into_variant); + __Pyx_DECREF(__pyx_v___pyx_result->target_id_to_smart_step_into_variant); + __pyx_v___pyx_result->target_id_to_smart_step_into_variant = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 22, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->thread_tracer); + __Pyx_DECREF(__pyx_v___pyx_result->thread_tracer); + __pyx_v___pyx_result->thread_tracer = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + 
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 23, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->top_level_thread_tracer_no_back_frames); + __Pyx_DECREF(__pyx_v___pyx_result->top_level_thread_tracer_no_back_frames); + __pyx_v___pyx_result->top_level_thread_tracer_no_back_frames = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 24, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->top_level_thread_tracer_unhandled); + __Pyx_DECREF(__pyx_v___pyx_result->top_level_thread_tracer_unhandled); + __pyx_v___pyx_result->top_level_thread_tracer_unhandled = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 25, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->trace_suspend_type); + __Pyx_DECREF(__pyx_v___pyx_result->trace_suspend_type); + __pyx_v___pyx_result->trace_suspend_type = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = 
__pyx_state[25] + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[26]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 26) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_3) { + + /* "(tree fragment)":14 + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[26]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 26, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[26]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = 
__pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBAdditionalThreadInfo__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle__TryExceptContainerObj(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15__pyx_unpickle__TryExceptContainerObj(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_15__pyx_unpickle__TryExceptContainerObj = {"__pyx_unpickle__TryExceptContainerObj", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15__pyx_unpickle__TryExceptContainerObj, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15__pyx_unpickle__TryExceptContainerObj(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle__TryExceptContainerObj (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__TryExceptContainerObj", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__TryExceptContainerObj", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle__TryExceptContainerObj") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto 
__pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__TryExceptContainerObj", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle__TryExceptContainerObj", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_14__pyx_unpickle__TryExceptContainerObj(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_14__pyx_unpickle__TryExceptContainerObj(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle__TryExceptContainerObj", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xc8b6eb1, 0xdbf5e44, 0xde17cd3): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__10, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xc8b6eb1, 0xdbf5e44, 0xde17cd3): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) + * __pyx_result = _TryExceptContainerObj.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + 
__Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xc8b6eb1, 0xdbf5e44, 0xde17cd3): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = _TryExceptContainerObj.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xc8b6eb1, 0xdbf5e44, 0xde17cd3): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) + * __pyx_result = _TryExceptContainerObj.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) + * __pyx_result = _TryExceptContainerObj.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = _TryExceptContainerObj.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle__TryExceptContainerObj__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xc8b6eb1, 0xdbf5e44, 0xde17cd3) = (try_except_infos))" % __pyx_checksum) + * __pyx_result = _TryExceptContainerObj.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): + * __pyx_result.try_except_infos = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle__TryExceptContainerObj(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle__TryExceptContainerObj", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * 
__pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.try_except_infos = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle__TryExceptContainerObj__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle__TryExceptContainerObj__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): + * __pyx_result.try_except_infos = __pyx_state[0] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyList_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->try_except_infos); + __Pyx_DECREF(__pyx_v___pyx_result->try_except_infos); + __pyx_v___pyx_result->try_except_infos = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): + * __pyx_result.try_except_infos = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 1) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.try_except_infos = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 
14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): + * __pyx_result.try_except_infos = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle__TryExceptContainerObj__set_state(<_TryExceptContainerObj> __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__TryExceptContainerObj__set_state(_TryExceptContainerObj __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.try_except_infos = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle__TryExceptContainerObj__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBFrame(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBFrame = {"__pyx_unpickle_PyDBFrame", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBFrame, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBFrame(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 
0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBFrame (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBFrame", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBFrame", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_PyDBFrame") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBFrame", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBFrame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_16__pyx_unpickle_PyDBFrame(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_16__pyx_unpickle_PyDBFrame(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBFrame", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x506e682, 0x3a8c26e, 0xb793695): # <<<<<<<<<<<<<< 
+ * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__11, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x506e682, 0x3a8c26e, 0xb793695): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) + * __pyx_result = PyDBFrame.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x506e682, 0x3a8c26e, 0xb793695): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_3, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x506e682, 0x3a8c26e, 0xb793695): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) + * __pyx_result = PyDBFrame.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) + * __pyx_result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise 
__pyx_PickleError("Incompatible checksums (0x%x vs (0x506e682, 0x3a8c26e, 0xb793695) = (_args, exc_info, should_skip))" % __pyx_checksum) + * __pyx_result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBFrame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBFrame__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[3]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + 
__Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_args); + __Pyx_DECREF(__pyx_v___pyx_result->_args); + __pyx_v___pyx_result->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->exc_info); + __Pyx_DECREF(__pyx_v___pyx_result->exc_info); + __pyx_v___pyx_result->exc_info = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->should_skip = __pyx_t_2; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[3]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 3) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_3) { + + /* "(tree fragment)":14 + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[3]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if 
(likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[3]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBFrame__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_SafeCallWrapper(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_SafeCallWrapper = {"__pyx_unpickle_SafeCallWrapper", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_SafeCallWrapper, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_SafeCallWrapper(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_SafeCallWrapper (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto 
__pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_SafeCallWrapper", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_SafeCallWrapper", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_SafeCallWrapper") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_SafeCallWrapper", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_SafeCallWrapper", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_18__pyx_unpickle_SafeCallWrapper(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_18__pyx_unpickle_SafeCallWrapper(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_SafeCallWrapper", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x77c077b, 0xa14289b, 0x3cc10aa): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__12, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 
+ * cdef object __pyx_result + * if __pyx_checksum not in (0x77c077b, 0xa14289b, 0x3cc10aa): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) + * __pyx_result = SafeCallWrapper.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x77c077b, 0xa14289b, 0x3cc10aa): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_4, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x77c077b, 0xa14289b, 0x3cc10aa): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) + * __pyx_result = SafeCallWrapper.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) + * __pyx_result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise 
__pyx_PickleError("Incompatible checksums (0x%x vs (0x77c077b, 0xa14289b, 0x3cc10aa) = (method_object))" % __pyx_checksum) + * __pyx_result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): + * __pyx_result.method_object = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_SafeCallWrapper", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.method_object = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_SafeCallWrapper__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): + * __pyx_result.method_object = __pyx_state[0] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->method_object); + __Pyx_DECREF(__pyx_v___pyx_result->method_object); + __pyx_v___pyx_result->method_object = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): + * __pyx_result.method_object = __pyx_state[0] + * if 
len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 1) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.method_object = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): + * __pyx_result.method_object = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_SafeCallWrapper__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.method_object = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_SafeCallWrapper__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions = {"__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = 
__Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_20__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_20__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__13, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); 
__pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerOnlyUnhandledExceptions.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = TopLevelThreadTracerOnlyUnhandledExceptions.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_5, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerOnlyUnhandledExceptions.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerOnlyUnhandledExceptions.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = TopLevelThreadTracerOnlyUnhandledExceptions.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerOnlyUnhandledExceptions.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + 
__Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_args); + __Pyx_DECREF(__pyx_v___pyx_result->_args); + __pyx_v___pyx_result->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 1) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result._args = __pyx_state[0] + * if 
len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_TopLevelThreadTracerNoBackFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_TopLevelThreadTracerNoBackFrame(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_TopLevelThreadTracerNoBackFrame = {"__pyx_unpickle_TopLevelThreadTracerNoBackFrame", 
(PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_TopLevelThreadTracerNoBackFrame, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_TopLevelThreadTracerNoBackFrame(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_TopLevelThreadTracerNoBackFrame (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_TopLevelThreadTracerNoBackFrame", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_TopLevelThreadTracerNoBackFrame", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_TopLevelThreadTracerNoBackFrame") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_TopLevelThreadTracerNoBackFrame", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_TopLevelThreadTracerNoBackFrame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22__pyx_unpickle_TopLevelThreadTracerNoBackFrame(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_14_pydevd_bundle_13pydevd_cython_22__pyx_unpickle_TopLevelThreadTracerNoBackFrame(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_TopLevelThreadTracerNoBackFrame", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__14, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerNoBackFrame.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = TopLevelThreadTracerNoBackFrame.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_6, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerNoBackFrame.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerNoBackFrame.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = TopLevelThreadTracerNoBackFrame.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xa3a9ec1, 0x3f5f7e9, 0x0ff9c96) = (_args, _frame_trace_dispatch, _last_exc_arg, _last_raise_line, _raise_lines, try_except_infos))" % __pyx_checksum) + * __pyx_result = TopLevelThreadTracerNoBackFrame.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_TopLevelThreadTracerNoBackFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + 
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_TopLevelThreadTracerNoBackFrame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] + * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[6]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_args); + __Pyx_DECREF(__pyx_v___pyx_result->_args); + __pyx_v___pyx_result->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_frame_trace_dispatch); + __Pyx_DECREF(__pyx_v___pyx_result->_frame_trace_dispatch); + 
__pyx_v___pyx_result->_frame_trace_dispatch = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_last_exc_arg); + __Pyx_DECREF(__pyx_v___pyx_result->_last_exc_arg); + __pyx_v___pyx_result->_last_exc_arg = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->_last_raise_line = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PySet_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_raise_lines); + __Pyx_DECREF(__pyx_v___pyx_result->_raise_lines); + __pyx_v___pyx_result->_raise_lines = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->try_except_infos); + __Pyx_DECREF(__pyx_v___pyx_result->try_except_infos); + __pyx_v___pyx_result->try_except_infos = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] + * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[6]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 6) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto 
__pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_3) { + + /* "(tree fragment)":14 + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] + * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[6]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] + * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[6]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state(TopLevelThreadTracerNoBackFrame __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0]; __pyx_result._frame_trace_dispatch = __pyx_state[1]; __pyx_result._last_exc_arg = __pyx_state[2]; __pyx_result._last_raise_line = __pyx_state[3]; __pyx_result._raise_lines = __pyx_state[4]; __pyx_result.try_except_infos = __pyx_state[5] + * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_TopLevelThreadTracerNoBackFrame__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_25__pyx_unpickle_ThreadTracer(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_25__pyx_unpickle_ThreadTracer = {"__pyx_unpickle_ThreadTracer", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_25__pyx_unpickle_ThreadTracer, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_25__pyx_unpickle_ThreadTracer(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadTracer (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + 
CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadTracer", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadTracer", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_ThreadTracer") < 0)) __PYX_ERR(2, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadTracer", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_ThreadTracer", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24__pyx_unpickle_ThreadTracer(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24__pyx_unpickle_ThreadTracer(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadTracer", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__13, Py_NE)); if 
(unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = ThreadTracer.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_5, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(2, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d7902a, 0x121e1fb, 0xf3a61b1): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = ThreadTracer.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d7902a, 
0x121e1fb, 0xf3a61b1) = (_args))" % __pyx_checksum) + * __pyx_result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_ThreadTracer", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadTracer__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(2, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_args); + __Pyx_DECREF(__pyx_v___pyx_result->_args); + __pyx_v___pyx_result->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): + * 
__pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(2, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 1) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(2, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_ThreadTracer__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_ThreadTracer__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o); + p->pydev_step_stop = Py_None; Py_INCREF(Py_None); + p->pydev_smart_step_stop = Py_None; Py_INCREF(Py_None); + p->pydev_call_from_jinja2 = Py_None; Py_INCREF(Py_None); + p->pydev_call_inside_jinja2 = Py_None; Py_INCREF(Py_None); + p->conditional_breakpoint_exception = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->pydev_message = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->pydev_func_name = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->trace_suspend_type = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->top_level_thread_tracer_no_back_frames = Py_None; Py_INCREF(Py_None); + p->top_level_thread_tracer_unhandled = Py_None; Py_INCREF(Py_None); + p->thread_tracer = Py_None; Py_INCREF(Py_None); + p->step_in_initial_location = Py_None; Py_INCREF(Py_None); + p->pydev_smart_step_into_variants = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->target_id_to_smart_step_into_variant = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->pydev_step_stop); + 
Py_CLEAR(p->pydev_smart_step_stop); + Py_CLEAR(p->pydev_call_from_jinja2); + Py_CLEAR(p->pydev_call_inside_jinja2); + Py_CLEAR(p->conditional_breakpoint_exception); + Py_CLEAR(p->pydev_message); + Py_CLEAR(p->pydev_func_name); + Py_CLEAR(p->trace_suspend_type); + Py_CLEAR(p->top_level_thread_tracer_no_back_frames); + Py_CLEAR(p->top_level_thread_tracer_unhandled); + Py_CLEAR(p->thread_tracer); + Py_CLEAR(p->step_in_initial_location); + Py_CLEAR(p->pydev_smart_step_into_variants); + Py_CLEAR(p->target_id_to_smart_step_into_variant); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o; + if (p->pydev_step_stop) { + e = (*v)(p->pydev_step_stop, a); if (e) return e; + } + if (p->pydev_smart_step_stop) { + e = (*v)(p->pydev_smart_step_stop, a); if (e) return e; + } + if (p->pydev_call_from_jinja2) { + e = (*v)(p->pydev_call_from_jinja2, a); if (e) return e; + } + if (p->pydev_call_inside_jinja2) { + e = (*v)(p->pydev_call_inside_jinja2, a); if (e) return e; + } + if (p->conditional_breakpoint_exception) { + e = (*v)(p->conditional_breakpoint_exception, a); if (e) return e; + } + if (p->top_level_thread_tracer_no_back_frames) { + e = (*v)(p->top_level_thread_tracer_no_back_frames, a); if (e) return e; + } + if (p->top_level_thread_tracer_unhandled) { + e = (*v)(p->top_level_thread_tracer_unhandled, a); if (e) return e; + } + if (p->thread_tracer) { + e = (*v)(p->thread_tracer, a); if (e) return e; + } + if (p->step_in_initial_location) { + e = (*v)(p->step_in_initial_location, a); if (e) return e; + } + if (p->pydev_smart_step_into_variants) { + e = (*v)(p->pydev_smart_step_into_variants, a); if (e) return e; + } + if (p->target_id_to_smart_step_into_variant) { + e = (*v)(p->target_id_to_smart_step_into_variant, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o; + tmp = ((PyObject*)p->pydev_step_stop); + p->pydev_step_stop = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->pydev_smart_step_stop); + p->pydev_smart_step_stop = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->pydev_call_from_jinja2); + p->pydev_call_from_jinja2 = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->pydev_call_inside_jinja2); + p->pydev_call_inside_jinja2 = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->conditional_breakpoint_exception); + p->conditional_breakpoint_exception = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->top_level_thread_tracer_no_back_frames); + p->top_level_thread_tracer_no_back_frames = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->top_level_thread_tracer_unhandled); + p->top_level_thread_tracer_unhandled = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->thread_tracer); + p->thread_tracer = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->step_in_initial_location); + p->step_in_initial_location = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = 
((PyObject*)p->pydev_smart_step_into_variants); + p->pydev_smart_step_into_variants = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->target_id_to_smart_step_into_variant); + p->target_id_to_smart_step_into_variant = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_original_step_cmd(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_original_step_cmd(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_23pydev_original_step_cmd_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject 
*__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception(PyObject *o, CYTHON_UNUSED void *x) { + return 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspended_at_unhandled(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspended_at_unhandled(PyObject *o, PyObject 
*v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22suspended_at_unhandled_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_trace_suspend_type(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_trace_suspend_type(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_18trace_suspend_type_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_no_back_frames(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_no_back_frames(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_38top_level_thread_tracer_no_back_frames_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_unhandled(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_unhandled(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_33top_level_thread_tracer_unhandled_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_thread_tracer(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_thread_tracer(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13thread_tracer_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_step_in_initial_location(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_step_in_initial_location(PyObject *o, PyObject *v, 
CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24step_in_initial_location_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_parent_offset(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_parent_offset(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_25pydev_smart_parent_offset_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_child_offset(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_child_offset(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_smart_child_offset_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_into_variants(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_into_variants(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_30pydev_smart_step_into_variants_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_target_id_to_smart_step_into_variant(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_target_id_to_smart_step_into_variant(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_36target_id_to_smart_step_into_variant_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_use_scoped_step_frame(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_use_scoped_step_frame(PyObject *o, 
PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_27pydev_use_scoped_step_frame_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo[] = { + {"get_topmost_frame", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_3get_topmost_frame, METH_O, __pyx_doc_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2get_topmost_frame}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo[] = { + {(char *)"pydev_state", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state, (char *)0, 0}, + {(char *)"pydev_step_stop", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop, (char *)0, 0}, + {(char *)"pydev_original_step_cmd", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_original_step_cmd, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_original_step_cmd, (char *)0, 0}, + {(char *)"pydev_step_cmd", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd, (char *)0, 0}, + {(char *)"pydev_notify_kill", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill, (char *)0, 0}, + {(char *)"pydev_smart_step_stop", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop, (char *)0, 0}, + {(char *)"pydev_django_resolve_frame", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame, (char *)0, 0}, + {(char *)"pydev_call_from_jinja2", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2, (char *)0, 0}, + {(char *)"pydev_call_inside_jinja2", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2, (char *)0, 0}, + {(char *)"is_tracing", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing, (char *)0, 0}, + {(char *)"conditional_breakpoint_exception", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception, 
__pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception, (char *)0, 0}, + {(char *)"pydev_message", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message, (char *)0, 0}, + {(char *)"suspend_type", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type, (char *)0, 0}, + {(char *)"pydev_next_line", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line, (char *)0, 0}, + {(char *)"pydev_func_name", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name, (char *)0, 0}, + {(char *)"suspended_at_unhandled", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspended_at_unhandled, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspended_at_unhandled, (char *)0, 0}, + {(char *)"trace_suspend_type", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_trace_suspend_type, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_trace_suspend_type, (char *)0, 0}, + {(char *)"top_level_thread_tracer_no_back_frames", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_no_back_frames, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_no_back_frames, (char *)0, 0}, + {(char *)"top_level_thread_tracer_unhandled", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_unhandled, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_top_level_thread_tracer_unhandled, (char *)0, 0}, + {(char *)"thread_tracer", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_thread_tracer, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_thread_tracer, (char *)0, 0}, + {(char *)"step_in_initial_location", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_step_in_initial_location, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_step_in_initial_location, (char *)0, 0}, + {(char *)"pydev_smart_parent_offset", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_parent_offset, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_parent_offset, (char *)0, 0}, + {(char *)"pydev_smart_child_offset", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_child_offset, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_child_offset, (char *)0, 0}, + {(char *)"pydev_smart_step_into_variants", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_into_variants, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_into_variants, (char *)0, 0}, + {(char *)"target_id_to_smart_step_into_variant", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_target_id_to_smart_step_into_variant, 
__pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_target_id_to_smart_step_into_variant, (char *)0, 0}, + {(char *)"pydev_use_scoped_step_frame", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_use_scoped_step_frame, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_use_scoped_step_frame, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_5__str__, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)o); + p->try_except_infos = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void 
__pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->try_except_infos); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)o; + if (p->try_except_infos) { + e = (*v)(p->try_except_infos, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj *)o; + tmp = ((PyObject*)p->try_except_infos); + p->try_except_infos = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_try_except_infos(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_try_except_infos(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_16try_except_infos_5__del__(o); + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_3__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_5__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj[] = { + {(char *)"try_except_infos", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_try_except_infos, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_try_except_infos, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython._TryExceptContainerObj", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, 
/*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_22_TryExceptContainerObj_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; +static struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o); + p->__pyx_vtab = __pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->exc_info = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_args); + Py_CLEAR(p->exc_info); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o; + if (p->_args) { + e = (*v)(p->_args, a); if (e) return e; + } + if (p->exc_info) { + e = (*v)(p->exc_info, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyObject *o) { + PyObject* tmp; + struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o; + tmp = ((PyObject*)p->_args); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->exc_info); + p->exc_info = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBFrame[] = { + {"set_suspend", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_3set_suspend, METH_VARARGS|METH_KEYWORDS, 0}, + {"do_wait_suspend", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_5do_wait_suspend, METH_VARARGS|METH_KEYWORDS, 0}, + {"trace_exception", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_7trace_exception, METH_VARARGS|METH_KEYWORDS, 0}, + {"handle_user_exception", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_9handle_user_exception, METH_O, 0}, + {"trace_dispatch", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11trace_dispatch, METH_VARARGS|METH_KEYWORDS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_13__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_15__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.PyDBFrame", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && 
PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o); + p->method_object = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->method_object); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o; + if (p->method_object) { + e = (*v)(p->method_object, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o; + tmp = ((PyObject*)p->method_object); + p->method_object = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper[] = { + {"get_method_object", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_5get_method_object, METH_NOARGS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.SafeCallWrapper", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_3__call__, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + 
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)o); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_args); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)o; + if (p->_args) { + e = (*v)(p->_args, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions *)o; + tmp = ((PyObject*)p->_args); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + 
Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions__args(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions__args(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5_args_5__del__(o); + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions[] = { + {"trace_unhandled_exceptions", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_3trace_unhandled_exceptions, METH_VARARGS|METH_KEYWORDS, 0}, + {"get_trace_dispatch_func", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_5get_trace_dispatch_func, METH_NOARGS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions[] = { + {(char *)"_args", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions__args, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions__args, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.TopLevelThreadTracerOnlyUnhandledExceptions", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions, /*tp_methods*/ + 
0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_43TopLevelThreadTracerOnlyUnhandledExceptions_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)o); + p->_frame_trace_dispatch = Py_None; Py_INCREF(Py_None); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->try_except_infos = Py_None; Py_INCREF(Py_None); + p->_last_exc_arg = Py_None; Py_INCREF(Py_None); + p->_raise_lines = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_frame_trace_dispatch); + Py_CLEAR(p->_args); + Py_CLEAR(p->try_except_infos); + Py_CLEAR(p->_last_exc_arg); + Py_CLEAR(p->_raise_lines); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)o; + if (p->_frame_trace_dispatch) { + e = (*v)(p->_frame_trace_dispatch, a); if (e) return e; + } + if (p->_args) { + e = (*v)(p->_args, a); if (e) return e; + } + if (p->try_except_infos) { + e = (*v)(p->try_except_infos, a); if (e) return e; + } + if (p->_last_exc_arg) { + e = (*v)(p->_last_exc_arg, a); if (e) return e; + } + if (p->_raise_lines) { + e = (*v)(p->_raise_lines, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame(PyObject *o) { + PyObject* tmp; + struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame *)o; + tmp = ((PyObject*)p->_frame_trace_dispatch); + p->_frame_trace_dispatch = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_args); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->try_except_infos); + p->try_except_infos = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_last_exc_arg); + p->_last_exc_arg = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_raise_lines); + p->_raise_lines = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__frame_trace_dispatch(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__frame_trace_dispatch(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_21_frame_trace_dispatch_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__args(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__args(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5_args_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_try_except_infos(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_try_except_infos(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16try_except_infos_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_exc_arg(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_exc_arg(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_13_last_exc_arg_5__del__(o); + } +} + +static PyObject 
*__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__raise_lines(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__raise_lines(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_12_raise_lines_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_raise_line(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_raise_line(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_16_last_raise_line_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame[] = { + {"trace_dispatch_and_unhandled_exceptions", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_3trace_dispatch_and_unhandled_exceptions, METH_VARARGS|METH_KEYWORDS, 0}, + {"get_trace_dispatch_func", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_5get_trace_dispatch_func, METH_NOARGS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame[] = { + {(char *)"_frame_trace_dispatch", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__frame_trace_dispatch, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__frame_trace_dispatch, (char *)0, 0}, + {(char *)"_args", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__args, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__args, (char *)0, 0}, + {(char *)"try_except_infos", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_try_except_infos, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_try_except_infos, (char *)0, 0}, + {(char *)"_last_exc_arg", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_exc_arg, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_exc_arg, (char *)0, 0}, + {(char *)"_raise_lines", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__raise_lines, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__raise_lines, (char *)0, 0}, + {(char *)"_last_raise_line", 
__pyx_getprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_raise_line, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame__last_raise_line, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.TopLevelThreadTracerNoBackFrame", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_31TopLevelThreadTracerNoBackFrame_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o; + #if CYTHON_USE_TP_FINALIZE + 
if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_args); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o; + if (p->_args) { + e = (*v)(p->_args, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o; + tmp = ((PyObject*)p->_args); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_5__del__(o); + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_ThreadTracer[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython_ThreadTracer[] = { + {(char *)"_args", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.ThreadTracer", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_3__call__, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 
__pyx_methods_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_pydevd_cython(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_pydevd_cython}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "pydevd_cython", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_, __pyx_k_, sizeof(__pyx_k_), 0, 0, 1, 0}, + {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, + {&__pyx_n_s_ALL, __pyx_k_ALL, sizeof(__pyx_k_ALL), 0, 0, 1, 1}, + {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_SET_FUNCTION_BREAK, __pyx_k_CMD_SET_FUNCTION_BREAK, sizeof(__pyx_k_CMD_SET_FUNCTION_BREAK), 0, 0, 1, 1}, + {&__pyx_n_s_DEBUG_START, __pyx_k_DEBUG_START, sizeof(__pyx_k_DEBUG_START), 0, 0, 1, 1}, + {&__pyx_n_s_DEBUG_START_PY3K, __pyx_k_DEBUG_START_PY3K, sizeof(__pyx_k_DEBUG_START_PY3K), 0, 0, 1, 1}, + {&__pyx_n_s_EXCEPTION_TYPE_HANDLED, __pyx_k_EXCEPTION_TYPE_HANDLED, sizeof(__pyx_k_EXCEPTION_TYPE_HANDLED), 0, 0, 1, 1}, + {&__pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED, __pyx_k_EXCEPTION_TYPE_USER_UNHANDLED, sizeof(__pyx_k_EXCEPTION_TYPE_USER_UNHANDLED), 0, 0, 1, 1}, + {&__pyx_kp_s_Error_in_linecache_checkcache_r, __pyx_k_Error_in_linecache_checkcache_r, sizeof(__pyx_k_Error_in_linecache_checkcache_r), 0, 0, 1, 0}, + {&__pyx_kp_s_Error_in_linecache_getline_r_s_f, __pyx_k_Error_in_linecache_getline_r_s_f, sizeof(__pyx_k_Error_in_linecache_getline_r_s_f), 0, 0, 1, 0}, + {&__pyx_n_s_ForkSafeLock, __pyx_k_ForkSafeLock, sizeof(__pyx_k_ForkSafeLock), 0, 0, 1, 1}, + {&__pyx_n_s_GeneratorExit, __pyx_k_GeneratorExit, 
sizeof(__pyx_k_GeneratorExit), 0, 0, 1, 1}, + {&__pyx_n_s_IGNORE_EXCEPTION_TAG, __pyx_k_IGNORE_EXCEPTION_TAG, sizeof(__pyx_k_IGNORE_EXCEPTION_TAG), 0, 0, 1, 1}, + {&__pyx_kp_s_IgnoreException, __pyx_k_IgnoreException, sizeof(__pyx_k_IgnoreException), 0, 0, 1, 0}, + {&__pyx_kp_s_Ignore_exception_s_in_library_s, __pyx_k_Ignore_exception_s_in_library_s, sizeof(__pyx_k_Ignore_exception_s_in_library_s), 0, 0, 1, 0}, + {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_k_Incompatible_checksums_0x_x_vs_0_2, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_2), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_3, __pyx_k_Incompatible_checksums_0x_x_vs_0_3, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_3), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_4, __pyx_k_Incompatible_checksums_0x_x_vs_0_4, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_4), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_5, __pyx_k_Incompatible_checksums_0x_x_vs_0_5, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_5), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_6, __pyx_k_Incompatible_checksums_0x_x_vs_0_6, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_6), 0, 0, 1, 0}, + {&__pyx_n_s_KeyboardInterrupt, __pyx_k_KeyboardInterrupt, sizeof(__pyx_k_KeyboardInterrupt), 0, 0, 1, 1}, + {&__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_k_NORM_PATHS_AND_BASE_CONTAINER, sizeof(__pyx_k_NORM_PATHS_AND_BASE_CONTAINER), 0, 0, 1, 1}, + {&__pyx_n_s_NO_FTRACE, __pyx_k_NO_FTRACE, sizeof(__pyx_k_NO_FTRACE), 0, 0, 1, 1}, + {&__pyx_n_s_NameError, __pyx_k_NameError, sizeof(__pyx_k_NameError), 0, 0, 1, 1}, + {&__pyx_n_s_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 0, 1, 1}, + {&__pyx_n_s_PYDEVD_IPYTHON_CONTEXT, __pyx_k_PYDEVD_IPYTHON_CONTEXT, sizeof(__pyx_k_PYDEVD_IPYTHON_CONTEXT), 0, 0, 1, 1}, + {&__pyx_n_s_PYDEV_FILE, __pyx_k_PYDEV_FILE, sizeof(__pyx_k_PYDEV_FILE), 0, 0, 1, 1}, + {&__pyx_n_s_PYTHON_SUSPEND, __pyx_k_PYTHON_SUSPEND, sizeof(__pyx_k_PYTHON_SUSPEND), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_PyDBAdditionalThreadInfo, __pyx_k_PyDBAdditionalThreadInfo, sizeof(__pyx_k_PyDBAdditionalThreadInfo), 0, 0, 1, 1}, + {&__pyx_n_s_PyDBFrame, __pyx_k_PyDBFrame, sizeof(__pyx_k_PyDBFrame), 0, 0, 1, 1}, + {&__pyx_n_s_RETURN_VALUES_DICT, __pyx_k_RETURN_VALUES_DICT, sizeof(__pyx_k_RETURN_VALUES_DICT), 0, 0, 1, 1}, + {&__pyx_n_s_STATE_RUN, __pyx_k_STATE_RUN, sizeof(__pyx_k_STATE_RUN), 0, 0, 1, 1}, + {&__pyx_n_s_SUPPORT_GEVENT, __pyx_k_SUPPORT_GEVENT, sizeof(__pyx_k_SUPPORT_GEVENT), 0, 0, 1, 1}, + {&__pyx_n_s_SafeCallWrapper, __pyx_k_SafeCallWrapper, sizeof(__pyx_k_SafeCallWrapper), 0, 0, 1, 1}, + {&__pyx_kp_s_State_s_Stop_s_Cmd_s_Kill_s, __pyx_k_State_s_Stop_s_Cmd_s_Kill_s, sizeof(__pyx_k_State_s_Stop_s_Cmd_s_Kill_s), 0, 0, 1, 0}, + {&__pyx_n_s_StopAsyncIteration, __pyx_k_StopAsyncIteration, sizeof(__pyx_k_StopAsyncIteration), 0, 0, 1, 1}, + {&__pyx_n_s_StopIteration, __pyx_k_StopIteration, sizeof(__pyx_k_StopIteration), 0, 0, 1, 1}, + {&__pyx_kp_s_Stop_inside_ipython_call, __pyx_k_Stop_inside_ipython_call, sizeof(__pyx_k_Stop_inside_ipython_call), 0, 0, 1, 0}, + {&__pyx_n_s_SystemExit, __pyx_k_SystemExit, sizeof(__pyx_k_SystemExit), 0, 0, 1, 1}, + {&__pyx_n_s_TRACE_PROPERTY, 
__pyx_k_TRACE_PROPERTY, sizeof(__pyx_k_TRACE_PROPERTY), 0, 0, 1, 1}, + {&__pyx_n_s_Thread, __pyx_k_Thread, sizeof(__pyx_k_Thread), 0, 0, 1, 1}, + {&__pyx_n_s_ThreadTracer, __pyx_k_ThreadTracer, sizeof(__pyx_k_ThreadTracer), 0, 0, 1, 1}, + {&__pyx_n_s_TopLevelThreadTracerNoBackFrame, __pyx_k_TopLevelThreadTracerNoBackFrame, sizeof(__pyx_k_TopLevelThreadTracerNoBackFrame), 0, 0, 1, 1}, + {&__pyx_n_s_TopLevelThreadTracerOnlyUnhandle, __pyx_k_TopLevelThreadTracerOnlyUnhandle, sizeof(__pyx_k_TopLevelThreadTracerOnlyUnhandle), 0, 0, 1, 1}, + {&__pyx_n_s_TryExceptContainerObj, __pyx_k_TryExceptContainerObj, sizeof(__pyx_k_TryExceptContainerObj), 0, 0, 1, 1}, + {&__pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA, __pyx_k_USE_CUSTOM_SYS_CURRENT_FRAMES_MA, sizeof(__pyx_k_USE_CUSTOM_SYS_CURRENT_FRAMES_MA), 0, 0, 1, 1}, + {&__pyx_kp_s_Unable_to_get_topmost_frame_for, __pyx_k_Unable_to_get_topmost_frame_for, sizeof(__pyx_k_Unable_to_get_topmost_frame_for), 0, 0, 1, 0}, + {&__pyx_kp_s_Using_Cython_speedups, __pyx_k_Using_Cython_speedups, sizeof(__pyx_k_Using_Cython_speedups), 0, 0, 1, 0}, + {&__pyx_kp_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 0}, + {&__pyx_kp_s__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 0, 1, 0}, + {&__pyx_kp_s__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 0, 1, 0}, + {&__pyx_kp_s__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 0, 1, 0}, + {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, + {&__pyx_n_s_add_command, __pyx_k_add_command, sizeof(__pyx_k_add_command), 0, 0, 1, 1}, + {&__pyx_n_s_add_exception_to_frame, __pyx_k_add_exception_to_frame, sizeof(__pyx_k_add_exception_to_frame), 0, 0, 1, 1}, + {&__pyx_n_s_additional_info, __pyx_k_additional_info, sizeof(__pyx_k_additional_info), 0, 0, 1, 1}, + {&__pyx_n_s_append, __pyx_k_append, sizeof(__pyx_k_append), 0, 0, 1, 1}, + {&__pyx_n_s_apply_files_filter, __pyx_k_apply_files_filter, sizeof(__pyx_k_apply_files_filter), 0, 0, 1, 1}, + {&__pyx_n_s_apply_to_settrace, __pyx_k_apply_to_settrace, sizeof(__pyx_k_apply_to_settrace), 0, 0, 1, 1}, + {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, + {&__pyx_n_s_args, __pyx_k_args, sizeof(__pyx_k_args), 0, 0, 1, 1}, + {&__pyx_n_s_args_2, __pyx_k_args_2, sizeof(__pyx_k_args_2), 0, 0, 1, 1}, + {&__pyx_n_s_basename, __pyx_k_basename, sizeof(__pyx_k_basename), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap, __pyx_k_bootstrap, sizeof(__pyx_k_bootstrap), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap_2, __pyx_k_bootstrap_2, sizeof(__pyx_k_bootstrap_2), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap_inner, __pyx_k_bootstrap_inner, sizeof(__pyx_k_bootstrap_inner), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap_inner_2, __pyx_k_bootstrap_inner_2, sizeof(__pyx_k_bootstrap_inner_2), 0, 0, 1, 1}, + {&__pyx_n_s_break_on_caught_exceptions, __pyx_k_break_on_caught_exceptions, sizeof(__pyx_k_break_on_caught_exceptions), 0, 0, 1, 1}, + {&__pyx_n_s_break_on_user_uncaught_exception, __pyx_k_break_on_user_uncaught_exception, sizeof(__pyx_k_break_on_user_uncaught_exception), 0, 0, 1, 1}, + {&__pyx_n_s_breakpoints, __pyx_k_breakpoints, sizeof(__pyx_k_breakpoints), 0, 0, 1, 1}, + {&__pyx_n_s_call, __pyx_k_call, sizeof(__pyx_k_call), 0, 0, 1, 1}, + {&__pyx_n_s_call_2, __pyx_k_call_2, sizeof(__pyx_k_call_2), 0, 0, 1, 1}, + {&__pyx_n_s_can_skip, __pyx_k_can_skip, sizeof(__pyx_k_can_skip), 0, 0, 1, 1}, + {&__pyx_kp_s_cell, __pyx_k_cell, sizeof(__pyx_k_cell), 0, 0, 1, 0}, + {&__pyx_n_s_checkcache, __pyx_k_checkcache, sizeof(__pyx_k_checkcache), 0, 0, 1, 1}, + {&__pyx_n_s_children_variants, __pyx_k_children_variants, sizeof(__pyx_k_children_variants), 0, 0, 1, 
1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cmd_factory, __pyx_k_cmd_factory, sizeof(__pyx_k_cmd_factory), 0, 0, 1, 1}, + {&__pyx_n_s_cmd_step_into, __pyx_k_cmd_step_into, sizeof(__pyx_k_cmd_step_into), 0, 0, 1, 1}, + {&__pyx_n_s_cmd_step_over, __pyx_k_cmd_step_over, sizeof(__pyx_k_cmd_step_over), 0, 0, 1, 1}, + {&__pyx_n_s_co_filename, __pyx_k_co_filename, sizeof(__pyx_k_co_filename), 0, 0, 1, 1}, + {&__pyx_n_s_co_firstlineno, __pyx_k_co_firstlineno, sizeof(__pyx_k_co_firstlineno), 0, 0, 1, 1}, + {&__pyx_n_s_co_flags, __pyx_k_co_flags, sizeof(__pyx_k_co_flags), 0, 0, 1, 1}, + {&__pyx_n_s_co_name, __pyx_k_co_name, sizeof(__pyx_k_co_name), 0, 0, 1, 1}, + {&__pyx_n_s_collect_return_info, __pyx_k_collect_return_info, sizeof(__pyx_k_collect_return_info), 0, 0, 1, 1}, + {&__pyx_n_s_collect_try_except_info, __pyx_k_collect_try_except_info, sizeof(__pyx_k_collect_try_except_info), 0, 0, 1, 1}, + {&__pyx_n_s_compile, __pyx_k_compile, sizeof(__pyx_k_compile), 0, 0, 1, 1}, + {&__pyx_n_s_condition, __pyx_k_condition, sizeof(__pyx_k_condition), 0, 0, 1, 1}, + {&__pyx_n_s_constant_to_str, __pyx_k_constant_to_str, sizeof(__pyx_k_constant_to_str), 0, 0, 1, 1}, + {&__pyx_n_s_constructed_tid_to_last_frame, __pyx_k_constructed_tid_to_last_frame, sizeof(__pyx_k_constructed_tid_to_last_frame), 0, 0, 1, 1}, + {&__pyx_n_s_current_frames, __pyx_k_current_frames, sizeof(__pyx_k_current_frames), 0, 0, 1, 1}, + {&__pyx_n_s_debug, __pyx_k_debug, sizeof(__pyx_k_debug), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dis, __pyx_k_dis, sizeof(__pyx_k_dis), 0, 0, 1, 1}, + {&__pyx_n_s_disable_tracing, __pyx_k_disable_tracing, sizeof(__pyx_k_disable_tracing), 0, 0, 1, 1}, + {&__pyx_n_s_do_wait_suspend, __pyx_k_do_wait_suspend, sizeof(__pyx_k_do_wait_suspend), 0, 0, 1, 1}, + {&__pyx_n_s_enable_tracing, __pyx_k_enable_tracing, sizeof(__pyx_k_enable_tracing), 0, 0, 1, 1}, + {&__pyx_n_s_encode, __pyx_k_encode, sizeof(__pyx_k_encode), 0, 0, 1, 1}, + {&__pyx_n_s_endswith, __pyx_k_endswith, sizeof(__pyx_k_endswith), 0, 0, 1, 1}, + {&__pyx_n_s_enter, __pyx_k_enter, sizeof(__pyx_k_enter), 0, 0, 1, 1}, + {&__pyx_n_s_event, __pyx_k_event, sizeof(__pyx_k_event), 0, 0, 1, 1}, + {&__pyx_n_s_except_line, __pyx_k_except_line, sizeof(__pyx_k_except_line), 0, 0, 1, 1}, + {&__pyx_n_s_exception, __pyx_k_exception, sizeof(__pyx_k_exception), 0, 0, 1, 1}, + {&__pyx_n_s_exception_break, __pyx_k_exception_break, sizeof(__pyx_k_exception_break), 0, 0, 1, 1}, + {&__pyx_n_s_exception_type, __pyx_k_exception_type, sizeof(__pyx_k_exception_type), 0, 0, 1, 1}, + {&__pyx_n_s_exclude_exception_by_filter, __pyx_k_exclude_exception_by_filter, sizeof(__pyx_k_exclude_exception_by_filter), 0, 0, 1, 1}, + {&__pyx_n_s_exec, __pyx_k_exec, sizeof(__pyx_k_exec), 0, 0, 1, 1}, + {&__pyx_n_s_execfile, __pyx_k_execfile, sizeof(__pyx_k_execfile), 0, 0, 1, 1}, + {&__pyx_n_s_exit, __pyx_k_exit, sizeof(__pyx_k_exit), 0, 0, 1, 1}, + {&__pyx_n_s_expression, __pyx_k_expression, sizeof(__pyx_k_expression), 0, 0, 1, 1}, + {&__pyx_n_s_f_back, __pyx_k_f_back, sizeof(__pyx_k_f_back), 0, 0, 1, 1}, + {&__pyx_n_s_f_code, __pyx_k_f_code, sizeof(__pyx_k_f_code), 0, 0, 1, 1}, + {&__pyx_n_s_f_globals, __pyx_k_f_globals, sizeof(__pyx_k_f_globals), 0, 0, 1, 1}, + {&__pyx_n_s_f_lasti, __pyx_k_f_lasti, sizeof(__pyx_k_f_lasti), 0, 0, 1, 1}, + {&__pyx_n_s_f_lineno, __pyx_k_f_lineno, sizeof(__pyx_k_f_lineno), 0, 0, 1, 1}, + {&__pyx_n_s_f_locals, 
__pyx_k_f_locals, sizeof(__pyx_k_f_locals), 0, 0, 1, 1}, + {&__pyx_n_s_f_trace, __pyx_k_f_trace, sizeof(__pyx_k_f_trace), 0, 0, 1, 1}, + {&__pyx_n_s_f_unhandled, __pyx_k_f_unhandled, sizeof(__pyx_k_f_unhandled), 0, 0, 1, 1}, + {&__pyx_n_s_filename, __pyx_k_filename, sizeof(__pyx_k_filename), 0, 0, 1, 1}, + {&__pyx_n_s_filename_to_lines_where_exceptio, __pyx_k_filename_to_lines_where_exceptio, sizeof(__pyx_k_filename_to_lines_where_exceptio), 0, 0, 1, 1}, + {&__pyx_n_s_filename_to_stat_info, __pyx_k_filename_to_stat_info, sizeof(__pyx_k_filename_to_stat_info), 0, 0, 1, 1}, + {&__pyx_n_s_findlinestarts, __pyx_k_findlinestarts, sizeof(__pyx_k_findlinestarts), 0, 0, 1, 1}, + {&__pyx_n_s_fix_top_level_trace_and_get_trac, __pyx_k_fix_top_level_trace_and_get_trac, sizeof(__pyx_k_fix_top_level_trace_and_get_trac), 0, 0, 1, 1}, + {&__pyx_n_s_force_only_unhandled_tracer, __pyx_k_force_only_unhandled_tracer, sizeof(__pyx_k_force_only_unhandled_tracer), 0, 0, 1, 1}, + {&__pyx_n_s_frame, __pyx_k_frame, sizeof(__pyx_k_frame), 0, 0, 1, 1}, + {&__pyx_n_s_frame_trace_dispatch, __pyx_k_frame_trace_dispatch, sizeof(__pyx_k_frame_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_func_name, __pyx_k_func_name, sizeof(__pyx_k_func_name), 0, 0, 1, 1}, + {&__pyx_n_s_function_breakpoint_name_to_brea, __pyx_k_function_breakpoint_name_to_brea, sizeof(__pyx_k_function_breakpoint_name_to_brea), 0, 0, 1, 1}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, + {&__pyx_n_s_get_abs_path_real_path_and_base, __pyx_k_get_abs_path_real_path_and_base, sizeof(__pyx_k_get_abs_path_real_path_and_base), 0, 0, 1, 1}, + {&__pyx_n_s_get_breakpoint, __pyx_k_get_breakpoint, sizeof(__pyx_k_get_breakpoint), 0, 0, 1, 1}, + {&__pyx_n_s_get_clsname_for_code, __pyx_k_get_clsname_for_code, sizeof(__pyx_k_get_clsname_for_code), 0, 0, 1, 1}, + {&__pyx_n_s_get_current_thread_id, __pyx_k_get_current_thread_id, sizeof(__pyx_k_get_current_thread_id), 0, 0, 1, 1}, + {&__pyx_n_s_get_exception_breakpoint, __pyx_k_get_exception_breakpoint, sizeof(__pyx_k_get_exception_breakpoint), 0, 0, 1, 1}, + {&__pyx_n_s_get_file_type, __pyx_k_get_file_type, sizeof(__pyx_k_get_file_type), 0, 0, 1, 1}, + {&__pyx_n_s_get_smart_step_into_variant_from, __pyx_k_get_smart_step_into_variant_from, sizeof(__pyx_k_get_smart_step_into_variant_from), 0, 0, 1, 1}, + {&__pyx_n_s_get_trace_dispatch_func, __pyx_k_get_trace_dispatch_func, sizeof(__pyx_k_get_trace_dispatch_func), 0, 0, 1, 1}, + {&__pyx_n_s_getline, __pyx_k_getline, sizeof(__pyx_k_getline), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_global_cache_frame_skips, __pyx_k_global_cache_frame_skips, sizeof(__pyx_k_global_cache_frame_skips), 0, 0, 1, 1}, + {&__pyx_n_s_global_cache_skips, __pyx_k_global_cache_skips, sizeof(__pyx_k_global_cache_skips), 0, 0, 1, 1}, + {&__pyx_n_s_global_notify_skipped_step_in_l, __pyx_k_global_notify_skipped_step_in_l, sizeof(__pyx_k_global_notify_skipped_step_in_l), 0, 0, 1, 1}, + {&__pyx_n_s_handle_breakpoint_condition, __pyx_k_handle_breakpoint_condition, sizeof(__pyx_k_handle_breakpoint_condition), 0, 0, 1, 1}, + {&__pyx_n_s_handle_breakpoint_expression, __pyx_k_handle_breakpoint_expression, sizeof(__pyx_k_handle_breakpoint_expression), 0, 0, 1, 1}, + {&__pyx_n_s_handle_user_exception, __pyx_k_handle_user_exception, sizeof(__pyx_k_handle_user_exception), 0, 0, 1, 1}, + {&__pyx_n_s_has_condition, __pyx_k_has_condition, sizeof(__pyx_k_has_condition), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_exception_breaks, 
__pyx_k_has_plugin_exception_breaks, sizeof(__pyx_k_has_plugin_exception_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_line_breaks, __pyx_k_has_plugin_line_breaks, sizeof(__pyx_k_has_plugin_line_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_id, __pyx_k_id, sizeof(__pyx_k_id), 0, 0, 1, 1}, + {&__pyx_n_s_ident, __pyx_k_ident, sizeof(__pyx_k_ident), 0, 0, 1, 1}, + {&__pyx_n_s_ignore_exception_trace, __pyx_k_ignore_exception_trace, sizeof(__pyx_k_ignore_exception_trace), 0, 0, 1, 1}, + {&__pyx_n_s_ignore_exceptions_thrown_in_line, __pyx_k_ignore_exceptions_thrown_in_line, sizeof(__pyx_k_ignore_exceptions_thrown_in_line), 0, 0, 1, 1}, + {&__pyx_n_s_ignore_system_exit_code, __pyx_k_ignore_system_exit_code, sizeof(__pyx_k_ignore_system_exit_code), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_in_project_scope, __pyx_k_in_project_scope, sizeof(__pyx_k_in_project_scope), 0, 0, 1, 1}, + {&__pyx_n_s_info, __pyx_k_info, sizeof(__pyx_k_info), 0, 0, 1, 1}, + {&__pyx_kp_s_invalid, __pyx_k_invalid, sizeof(__pyx_k_invalid), 0, 0, 1, 0}, + {&__pyx_n_s_is_files_filter_enabled, __pyx_k_is_files_filter_enabled, sizeof(__pyx_k_is_files_filter_enabled), 0, 0, 1, 1}, + {&__pyx_n_s_is_line_in_except_block, __pyx_k_is_line_in_except_block, sizeof(__pyx_k_is_line_in_except_block), 0, 0, 1, 1}, + {&__pyx_n_s_is_line_in_try_block, __pyx_k_is_line_in_try_block, sizeof(__pyx_k_is_line_in_try_block), 0, 0, 1, 1}, + {&__pyx_n_s_is_logpoint, __pyx_k_is_logpoint, sizeof(__pyx_k_is_logpoint), 0, 0, 1, 1}, + {&__pyx_n_s_is_thread_alive, __pyx_k_is_thread_alive, sizeof(__pyx_k_is_thread_alive), 0, 0, 1, 1}, + {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, + {&__pyx_n_s_just_raised, __pyx_k_just_raised, sizeof(__pyx_k_just_raised), 0, 0, 1, 1}, + {&__pyx_n_s_kwargs, __pyx_k_kwargs, sizeof(__pyx_k_kwargs), 0, 0, 1, 1}, + {&__pyx_kp_s_lambda, __pyx_k_lambda, sizeof(__pyx_k_lambda), 0, 0, 1, 0}, + {&__pyx_n_s_line, __pyx_k_line, sizeof(__pyx_k_line), 0, 0, 1, 1}, + {&__pyx_n_s_linecache, __pyx_k_linecache, sizeof(__pyx_k_linecache), 0, 0, 1, 1}, + {&__pyx_n_s_linesep, __pyx_k_linesep, sizeof(__pyx_k_linesep), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_main_2, __pyx_k_main_2, sizeof(__pyx_k_main_2), 0, 0, 1, 1}, + {&__pyx_n_s_make_io_message, __pyx_k_make_io_message, sizeof(__pyx_k_make_io_message), 0, 0, 1, 1}, + {&__pyx_n_s_match, __pyx_k_match, sizeof(__pyx_k_match), 0, 0, 1, 1}, + {&__pyx_n_s_method_object, __pyx_k_method_object, sizeof(__pyx_k_method_object), 0, 0, 1, 1}, + {&__pyx_kp_s_module, __pyx_k_module, sizeof(__pyx_k_module), 0, 0, 1, 0}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_name_2, __pyx_k_name_2, sizeof(__pyx_k_name_2), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_notify_on_first_raise_only, __pyx_k_notify_on_first_raise_only, sizeof(__pyx_k_notify_on_first_raise_only), 0, 0, 1, 1}, + {&__pyx_n_s_notify_skipped_step_in_because_o, __pyx_k_notify_skipped_step_in_because_o, sizeof(__pyx_k_notify_skipped_step_in_because_o), 0, 0, 1, 1}, + {&__pyx_n_s_notify_thread_not_alive, __pyx_k_notify_thread_not_alive, sizeof(__pyx_k_notify_thread_not_alive), 0, 0, 1, 1}, + {&__pyx_n_s_original_call, __pyx_k_original_call, sizeof(__pyx_k_original_call), 0, 0, 1, 1}, + {&__pyx_n_s_original_step_cmd, __pyx_k_original_step_cmd, sizeof(__pyx_k_original_step_cmd), 0, 0, 1, 1}, 
+ {&__pyx_n_s_os, __pyx_k_os, sizeof(__pyx_k_os), 0, 0, 1, 1}, + {&__pyx_n_s_os_path, __pyx_k_os_path, sizeof(__pyx_k_os_path), 0, 0, 1, 1}, + {&__pyx_n_s_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_plugin, __pyx_k_plugin, sizeof(__pyx_k_plugin), 0, 0, 1, 1}, + {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, + {&__pyx_n_s_py_db, __pyx_k_py_db, sizeof(__pyx_k_py_db), 0, 0, 1, 1}, + {&__pyx_kp_s_pyc, __pyx_k_pyc, sizeof(__pyx_k_pyc), 0, 0, 1, 0}, + {&__pyx_n_s_pydb_disposed, __pyx_k_pydb_disposed, sizeof(__pyx_k_pydb_disposed), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle, __pyx_k_pydev_bundle, sizeof(__pyx_k_pydev_bundle), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle__pydev_saved_modul, __pyx_k_pydev_bundle__pydev_saved_modul, sizeof(__pyx_k_pydev_bundle__pydev_saved_modul), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle_pydev_is_thread_al, __pyx_k_pydev_bundle_pydev_is_thread_al, sizeof(__pyx_k_pydev_bundle_pydev_is_thread_al), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle_pydev_log, __pyx_k_pydev_bundle_pydev_log, sizeof(__pyx_k_pydev_bundle_pydev_log), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_do_not_trace, __pyx_k_pydev_do_not_trace, sizeof(__pyx_k_pydev_do_not_trace), 0, 0, 1, 1}, + {&__pyx_kp_s_pydev_execfile_py, __pyx_k_pydev_execfile_py, sizeof(__pyx_k_pydev_execfile_py), 0, 0, 1, 0}, + {&__pyx_n_s_pydev_log, __pyx_k_pydev_log, sizeof(__pyx_k_pydev_log), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_log_exception, __pyx_k_pydev_log_exception, sizeof(__pyx_k_pydev_log_exception), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_monkey, __pyx_k_pydev_monkey, sizeof(__pyx_k_pydev_monkey), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd, __pyx_k_pydevd, sizeof(__pyx_k_pydevd), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle, __pyx_k_pydevd_bundle, sizeof(__pyx_k_pydevd_bundle), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_bytecode_u, __pyx_k_pydevd_bundle_pydevd_bytecode_u, sizeof(__pyx_k_pydevd_bundle_pydevd_bytecode_u), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_comm_const, __pyx_k_pydevd_bundle_pydevd_comm_const, sizeof(__pyx_k_pydevd_bundle_pydevd_comm_const), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_k_pydevd_bundle_pydevd_constants, sizeof(__pyx_k_pydevd_bundle_pydevd_constants), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_cython, __pyx_k_pydevd_bundle_pydevd_cython, sizeof(__pyx_k_pydevd_bundle_pydevd_cython), 0, 0, 1, 1}, + {&__pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_k_pydevd_bundle_pydevd_cython_pyx, sizeof(__pyx_k_pydevd_bundle_pydevd_cython_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_pydevd_bundle_pydevd_frame_util, __pyx_k_pydevd_bundle_pydevd_frame_util, sizeof(__pyx_k_pydevd_bundle_pydevd_frame_util), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_utils, __pyx_k_pydevd_bundle_pydevd_utils, sizeof(__pyx_k_pydevd_bundle_pydevd_utils), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_dont_trace, __pyx_k_pydevd_dont_trace, sizeof(__pyx_k_pydevd_dont_trace), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_file_utils, __pyx_k_pydevd_file_utils, sizeof(__pyx_k_pydevd_file_utils), 0, 0, 1, 1}, + {&__pyx_kp_s_pydevd_py, __pyx_k_pydevd_py, sizeof(__pyx_k_pydevd_py), 0, 0, 1, 0}, + {&__pyx_kp_s_pydevd_traceproperty_py, __pyx_k_pydevd_traceproperty_py, sizeof(__pyx_k_pydevd_traceproperty_py), 0, 0, 1, 0}, + {&__pyx_n_s_pydevd_tracing, __pyx_k_pydevd_tracing, sizeof(__pyx_k_pydevd_tracing), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, 
__pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_PyDBAdditionalThr, __pyx_k_pyx_unpickle_PyDBAdditionalThr, sizeof(__pyx_k_pyx_unpickle_PyDBAdditionalThr), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_PyDBFrame, __pyx_k_pyx_unpickle_PyDBFrame, sizeof(__pyx_k_pyx_unpickle_PyDBFrame), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_SafeCallWrapper, __pyx_k_pyx_unpickle_SafeCallWrapper, sizeof(__pyx_k_pyx_unpickle_SafeCallWrapper), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_ThreadTracer, __pyx_k_pyx_unpickle_ThreadTracer, sizeof(__pyx_k_pyx_unpickle_ThreadTracer), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_TopLevelThreadTra, __pyx_k_pyx_unpickle_TopLevelThreadTra, sizeof(__pyx_k_pyx_unpickle_TopLevelThreadTra), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_TopLevelThreadTra_2, __pyx_k_pyx_unpickle_TopLevelThreadTra_2, sizeof(__pyx_k_pyx_unpickle_TopLevelThreadTra_2), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle__TryExceptContain, __pyx_k_pyx_unpickle__TryExceptContain, sizeof(__pyx_k_pyx_unpickle__TryExceptContain), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_qname, __pyx_k_qname, sizeof(__pyx_k_qname), 0, 0, 1, 1}, + {&__pyx_n_s_quitting, __pyx_k_quitting, sizeof(__pyx_k_quitting), 0, 0, 1, 1}, + {&__pyx_n_s_raise_lines_in_except, __pyx_k_raise_lines_in_except, sizeof(__pyx_k_raise_lines_in_except), 0, 0, 1, 1}, + {&__pyx_n_s_re, __pyx_k_re, sizeof(__pyx_k_re), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_remove_exception_from_frame, __pyx_k_remove_exception_from_frame, sizeof(__pyx_k_remove_exception_from_frame), 0, 0, 1, 1}, + {&__pyx_n_s_remove_return_values_flag, __pyx_k_remove_return_values_flag, sizeof(__pyx_k_remove_return_values_flag), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_return_line, __pyx_k_return_line, sizeof(__pyx_k_return_line), 0, 0, 1, 1}, + {&__pyx_n_s_returns, __pyx_k_returns, sizeof(__pyx_k_returns), 0, 0, 1, 1}, + {&__pyx_n_s_rfind, __pyx_k_rfind, sizeof(__pyx_k_rfind), 0, 0, 1, 1}, + {&__pyx_n_s_run, __pyx_k_run, sizeof(__pyx_k_run), 0, 0, 1, 1}, + {&__pyx_kp_s_s_s, __pyx_k_s_s, sizeof(__pyx_k_s_s), 0, 0, 1, 0}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_send_caught_exception_stack, __pyx_k_send_caught_exception_stack, sizeof(__pyx_k_send_caught_exception_stack), 0, 0, 1, 1}, + {&__pyx_n_s_send_caught_exception_stack_proc, __pyx_k_send_caught_exception_stack_proc, sizeof(__pyx_k_send_caught_exception_stack_proc), 0, 0, 1, 1}, + {&__pyx_n_s_set_additional_thread_info, __pyx_k_set_additional_thread_info, sizeof(__pyx_k_set_additional_thread_info), 0, 0, 1, 1}, + {&__pyx_n_s_set_additional_thread_info_lock, __pyx_k_set_additional_thread_info_lock, sizeof(__pyx_k_set_additional_thread_info_lock), 0, 0, 1, 1}, + {&__pyx_n_s_set_suspend, __pyx_k_set_suspend, sizeof(__pyx_k_set_suspend), 0, 0, 1, 1}, + {&__pyx_n_s_set_trace_for_frame_and_parents, __pyx_k_set_trace_for_frame_and_parents, 
sizeof(__pyx_k_set_trace_for_frame_and_parents), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_should_trace_hook, __pyx_k_should_trace_hook, sizeof(__pyx_k_should_trace_hook), 0, 0, 1, 1}, + {&__pyx_n_s_show_return_values, __pyx_k_show_return_values, sizeof(__pyx_k_show_return_values), 0, 0, 1, 1}, + {&__pyx_n_s_skip_on_exceptions_thrown_in_sam, __pyx_k_skip_on_exceptions_thrown_in_sam, sizeof(__pyx_k_skip_on_exceptions_thrown_in_sam), 0, 0, 1, 1}, + {&__pyx_n_s_st_mtime, __pyx_k_st_mtime, sizeof(__pyx_k_st_mtime), 0, 0, 1, 1}, + {&__pyx_n_s_st_size, __pyx_k_st_size, sizeof(__pyx_k_st_size), 0, 0, 1, 1}, + {&__pyx_n_s_startswith, __pyx_k_startswith, sizeof(__pyx_k_startswith), 0, 0, 1, 1}, + {&__pyx_n_s_stat, __pyx_k_stat, sizeof(__pyx_k_stat), 0, 0, 1, 1}, + {&__pyx_n_s_stop, __pyx_k_stop, sizeof(__pyx_k_stop), 0, 0, 1, 1}, + {&__pyx_n_s_stop_on_unhandled_exception, __pyx_k_stop_on_unhandled_exception, sizeof(__pyx_k_stop_on_unhandled_exception), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_suspend, __pyx_k_suspend, sizeof(__pyx_k_suspend), 0, 0, 1, 1}, + {&__pyx_n_s_suspend_other_threads, __pyx_k_suspend_other_threads, sizeof(__pyx_k_suspend_other_threads), 0, 0, 1, 1}, + {&__pyx_n_s_suspend_policy, __pyx_k_suspend_policy, sizeof(__pyx_k_suspend_policy), 0, 0, 1, 1}, + {&__pyx_n_s_suspended_at_unhandled, __pyx_k_suspended_at_unhandled, sizeof(__pyx_k_suspended_at_unhandled), 0, 0, 1, 1}, + {&__pyx_n_s_t, __pyx_k_t, sizeof(__pyx_k_t), 0, 0, 1, 1}, + {&__pyx_n_s_tb_frame, __pyx_k_tb_frame, sizeof(__pyx_k_tb_frame), 0, 0, 1, 1}, + {&__pyx_n_s_tb_lineno, __pyx_k_tb_lineno, sizeof(__pyx_k_tb_lineno), 0, 0, 1, 1}, + {&__pyx_n_s_tb_next, __pyx_k_tb_next, sizeof(__pyx_k_tb_next), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_thread, __pyx_k_thread, sizeof(__pyx_k_thread), 0, 0, 1, 1}, + {&__pyx_n_s_thread_trace_func, __pyx_k_thread_trace_func, sizeof(__pyx_k_thread_trace_func), 0, 0, 1, 1}, + {&__pyx_n_s_thread_tracer, __pyx_k_thread_tracer, sizeof(__pyx_k_thread_tracer), 0, 0, 1, 1}, + {&__pyx_n_s_threading, __pyx_k_threading, sizeof(__pyx_k_threading), 0, 0, 1, 1}, + {&__pyx_n_s_threading_active, __pyx_k_threading_active, sizeof(__pyx_k_threading_active), 0, 0, 1, 1}, + {&__pyx_n_s_threading_current_thread, __pyx_k_threading_current_thread, sizeof(__pyx_k_threading_current_thread), 0, 0, 1, 1}, + {&__pyx_n_s_threading_get_ident, __pyx_k_threading_get_ident, sizeof(__pyx_k_threading_get_ident), 0, 0, 1, 1}, + {&__pyx_n_s_top_level_thread_tracer, __pyx_k_top_level_thread_tracer, sizeof(__pyx_k_top_level_thread_tracer), 0, 0, 1, 1}, + {&__pyx_n_s_top_level_thread_tracer_no_back, __pyx_k_top_level_thread_tracer_no_back, sizeof(__pyx_k_top_level_thread_tracer_no_back), 0, 0, 1, 1}, + {&__pyx_n_s_top_level_thread_tracer_unhandle, __pyx_k_top_level_thread_tracer_unhandle, sizeof(__pyx_k_top_level_thread_tracer_unhandle), 0, 0, 1, 1}, + {&__pyx_n_s_trace, __pyx_k_trace, sizeof(__pyx_k_trace), 0, 0, 1, 1}, + {&__pyx_n_s_trace_dispatch, __pyx_k_trace_dispatch, sizeof(__pyx_k_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_trace_dispatch_and_unhandled_exc, __pyx_k_trace_dispatch_and_unhandled_exc, sizeof(__pyx_k_trace_dispatch_and_unhandled_exc), 0, 0, 1, 1}, + {&__pyx_n_s_trace_exception, __pyx_k_trace_exception, 
sizeof(__pyx_k_trace_exception), 0, 0, 1, 1}, + {&__pyx_n_s_trace_unhandled_exceptions, __pyx_k_trace_unhandled_exceptions, sizeof(__pyx_k_trace_unhandled_exceptions), 0, 0, 1, 1}, + {&__pyx_n_s_try_exc_info, __pyx_k_try_exc_info, sizeof(__pyx_k_try_exc_info), 0, 0, 1, 1}, + {&__pyx_n_s_try_except_infos, __pyx_k_try_except_infos, sizeof(__pyx_k_try_except_infos), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_kp_s_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 0, 1, 0}, + {&__pyx_n_s_values, __pyx_k_values, sizeof(__pyx_k_values), 0, 0, 1, 1}, + {&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1}, + {&__pyx_n_s_writer, __pyx_k_writer, sizeof(__pyx_k_writer), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(0, 174, __pyx_L1_error) + __pyx_builtin_NameError = __Pyx_GetBuiltinName(__pyx_n_s_NameError); if (!__pyx_builtin_NameError) __PYX_ERR(0, 207, __pyx_L1_error) + __pyx_builtin_StopIteration = __Pyx_GetBuiltinName(__pyx_n_s_StopIteration); if (!__pyx_builtin_StopIteration) __PYX_ERR(0, 208, __pyx_L1_error) + __pyx_builtin_id = __Pyx_GetBuiltinName(__pyx_n_s_id); if (!__pyx_builtin_id) __PYX_ERR(0, 130, __pyx_L1_error) + __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 149, __pyx_L1_error) + __pyx_builtin_SystemExit = __Pyx_GetBuiltinName(__pyx_n_s_SystemExit); if (!__pyx_builtin_SystemExit) __PYX_ERR(0, 375, __pyx_L1_error) + __pyx_builtin_GeneratorExit = __Pyx_GetBuiltinName(__pyx_n_s_GeneratorExit); if (!__pyx_builtin_GeneratorExit) __PYX_ERR(0, 378, __pyx_L1_error) + __pyx_builtin_KeyboardInterrupt = __Pyx_GetBuiltinName(__pyx_n_s_KeyboardInterrupt); if (!__pyx_builtin_KeyboardInterrupt) __PYX_ERR(0, 1367, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":151 + * raise AttributeError() + * except: + * with _set_additional_thread_info_lock: # <<<<<<<<<<<<<< + * # If it's not there, set it within a lock to avoid any racing + * # conditions. + */ + __pyx_tuple__2 = PyTuple_Pack(3, Py_None, Py_None, Py_None); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "_pydevd_bundle/pydevd_cython.pyx":1177 + * filename = frame.f_code.co_filename + * if filename.endswith('.pyc'): + * filename = filename[:-1] # <<<<<<<<<<<<<< + * + * if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): + */ + __pyx_slice__4 = PySlice_New(Py_None, __pyx_int_neg_1, Py_None); if (unlikely(!__pyx_slice__4)) __PYX_ERR(0, 1177, __pyx_L1_error) + __Pyx_GOTREF(__pyx_slice__4); + __Pyx_GIVEREF(__pyx_slice__4); + + /* "_pydevd_bundle/pydevd_cython.pyx":1489 + * if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): + * # We need __bootstrap_inner, not __bootstrap. 
+ * return None, False # <<<<<<<<<<<<<< + * + * elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): + */ + __pyx_tuple__8 = PyTuple_Pack(2, Py_None, Py_False); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 1489, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x75b3b02, 0x5f02be1, 0xa5a0d63): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x75b3b02, 0x5f02be1, 0xa5a0d63) = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_original_step_cmd, pydev_smart_child_offset, pydev_smart_parent_offset, pydev_smart_step_into_variants, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, pydev_use_scoped_step_frame, step_in_initial_location, suspend_type, suspended_at_unhandled, target_id_to_smart_step_into_variant, thread_tracer, top_level_thread_tracer_no_back_frames, top_level_thread_tracer_unhandled, trace_suspend_type))" % __pyx_checksum) + */ + __pyx_tuple__9 = PyTuple_Pack(3, __pyx_int_123419394, __pyx_int_99625953, __pyx_int_173673827); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + __pyx_tuple__10 = PyTuple_Pack(3, __pyx_int_210464433, __pyx_int_230645316, __pyx_int_232881363); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_tuple__11 = PyTuple_Pack(3, __pyx_int_84338306, __pyx_int_61391470, __pyx_int_192493205); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + __pyx_tuple__12 = PyTuple_Pack(3, __pyx_int_125568891, __pyx_int_169093275, __pyx_int_63705258); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_tuple__13 = PyTuple_Pack(3, __pyx_int_64458794, __pyx_int_18997755, __pyx_int_255484337); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_tuple__14 = PyTuple_Pack(3, __pyx_int_171613889, __pyx_int_66451433, __pyx_int_16751766); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(2, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__14); + __Pyx_GIVEREF(__pyx_tuple__14); + + /* "_pydevd_bundle/pydevd_cython.pyx":11 + * from _pydev_bundle import pydev_log + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * pydev_log.debug("Using Cython speedups") # <<<<<<<<<<<<<< + * # ELSE + * # from _pydevd_bundle.pydevd_frame import PyDBFrame + */ + __pyx_tuple__15 = PyTuple_Pack(1, __pyx_kp_s_Using_Cython_speedups); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + + /* "_pydevd_bundle/pydevd_cython.pyx":145 + * + * + * def set_additional_thread_info(thread): # <<<<<<<<<<<<<< + * try: + * additional_info = thread.additional_info + */ + __pyx_tuple__16 = PyTuple_Pack(2, __pyx_n_s_thread, __pyx_n_s_additional_info); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__16); + __Pyx_GIVEREF(__pyx_tuple__16); + 
__pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(1, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_set_additional_thread_info, 145, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__17)) __PYX_ERR(0, 145, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":176 + * except ImportError: + * + * def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): # <<<<<<<<<<<<<< + * return None + * + */ + __pyx_tuple__18 = PyTuple_Pack(2, __pyx_n_s_args, __pyx_n_s_kwargs); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 176, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__18); + __Pyx_GIVEREF(__pyx_tuple__18); + __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_get_smart_step_into_variant_from, 176, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 176, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":198 + * basename = os.path.basename + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') # <<<<<<<<<<<<<< + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + */ + __pyx_tuple__20 = PyTuple_Pack(1, __pyx_kp_s_IgnoreException); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 198, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__20); + __Pyx_GIVEREF(__pyx_tuple__20); + + /* "_pydevd_bundle/pydevd_cython.pyx":199 + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') # <<<<<<<<<<<<<< + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + */ + __pyx_tuple__21 = PyTuple_Pack(2, __pyx_kp_s_pydevd_py, __pyx_n_s_run); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(0, 199, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__21); + __Pyx_GIVEREF(__pyx_tuple__21); + + /* "_pydevd_bundle/pydevd_cython.pyx":200 + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') # <<<<<<<<<<<<<< + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + * + */ + __pyx_tuple__22 = PyTuple_Pack(2, __pyx_kp_s_pydev_execfile_py, __pyx_n_s_execfile); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 200, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__22); + __Pyx_GIVEREF(__pyx_tuple__22); + + /* "_pydevd_bundle/pydevd_cython.pyx":1422 + * + * + * def notify_skipped_step_in_because_of_filters(py_db, frame): # <<<<<<<<<<<<<< + * global _global_notify_skipped_step_in + * + */ + __pyx_tuple__23 = PyTuple_Pack(2, __pyx_n_s_py_db, __pyx_n_s_frame); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 1422, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__23); + __Pyx_GIVEREF(__pyx_tuple__23); + __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__23, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_notify_skipped_step_in_because_o, 1422, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 1422, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1452 + * + * + * def fix_top_level_trace_and_get_trace_func(py_db, frame): # <<<<<<<<<<<<<< + * # IFDEF 
CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef str filename; + */ + __pyx_tuple__25 = PyTuple_Pack(15, __pyx_n_s_py_db, __pyx_n_s_frame, __pyx_n_s_filename, __pyx_n_s_name_2, __pyx_n_s_args, __pyx_n_s_thread, __pyx_n_s_f_unhandled, __pyx_n_s_force_only_unhandled_tracer, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_t, __pyx_n_s_additional_info, __pyx_n_s_top_level_thread_tracer, __pyx_n_s_f_trace, __pyx_n_s_thread_tracer); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 1452, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__25); + __Pyx_GIVEREF(__pyx_tuple__25); + __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(2, 0, 15, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_fix_top_level_trace_and_get_trac, 1452, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 1452, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1580 + * + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: + */ + __pyx_tuple__27 = PyTuple_Pack(6, __pyx_n_s_py_db, __pyx_n_s_frame, __pyx_n_s_event, __pyx_n_s_arg, __pyx_n_s_thread_trace_func, __pyx_n_s_apply_to_settrace); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(0, 1580, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__27); + __Pyx_GIVEREF(__pyx_tuple__27); + __pyx_codeobj__28 = (PyObject*)__Pyx_PyCode_New(4, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__27, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_trace_dispatch, 1580, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__28)) __PYX_ERR(0, 1580, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1866 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * constructed_tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + __pyx_tuple__29 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_frame, __pyx_n_s_event, __pyx_n_s_arg); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(0, 1866, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__29); + __Pyx_GIVEREF(__pyx_tuple__29); + __pyx_codeobj__30 = (PyObject*)__Pyx_PyCode_New(4, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_call_2, 1866, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__30)) __PYX_ERR(0, 1866, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__31 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__31)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__31); + __Pyx_GIVEREF(__pyx_tuple__31); + __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__31, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_PyDBAdditionalThr, 1, __pyx_empty_bytes); if 
(unlikely(!__pyx_codeobj__32)) __PYX_ERR(2, 1, __pyx_L1_error) + __pyx_tuple__33 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__33)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__33); + __Pyx_GIVEREF(__pyx_tuple__33); + __pyx_codeobj__34 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__33, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle__TryExceptContain, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__34)) __PYX_ERR(2, 1, __pyx_L1_error) + __pyx_tuple__35 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__35)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__35); + __Pyx_GIVEREF(__pyx_tuple__35); + __pyx_codeobj__36 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__35, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_PyDBFrame, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__36)) __PYX_ERR(2, 1, __pyx_L1_error) + __pyx_tuple__37 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__37); + __Pyx_GIVEREF(__pyx_tuple__37); + __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_SafeCallWrapper, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(2, 1, __pyx_L1_error) + __pyx_tuple__39 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__39)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__39); + __Pyx_GIVEREF(__pyx_tuple__39); + __pyx_codeobj__40 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__39, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_TopLevelThreadTra, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__40)) __PYX_ERR(2, 1, __pyx_L1_error) + __pyx_tuple__41 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__41)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__41); + __Pyx_GIVEREF(__pyx_tuple__41); + __pyx_codeobj__42 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__41, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_TopLevelThreadTra_2, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__42)) __PYX_ERR(2, 1, __pyx_L1_error) + __pyx_tuple__43 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__43)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__43); + __Pyx_GIVEREF(__pyx_tuple__43); + __pyx_codeobj__44 = 
(PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_ThreadTracer, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__44)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + __pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_update.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_values.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyString_Type_rfind.type = (PyObject*)&PyString_Type; + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_11 = PyInt_FromLong(11); if (unlikely(!__pyx_int_11)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_111 = PyInt_FromLong(111); if (unlikely(!__pyx_int_111)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_137 = PyInt_FromLong(137); if (unlikely(!__pyx_int_137)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_160 = PyInt_FromLong(160); if (unlikely(!__pyx_int_160)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_16751766 = PyInt_FromLong(16751766L); if (unlikely(!__pyx_int_16751766)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_18997755 = PyInt_FromLong(18997755L); if (unlikely(!__pyx_int_18997755)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_61391470 = PyInt_FromLong(61391470L); if (unlikely(!__pyx_int_61391470)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_63705258 = PyInt_FromLong(63705258L); if (unlikely(!__pyx_int_63705258)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_64458794 = PyInt_FromLong(64458794L); if (unlikely(!__pyx_int_64458794)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_66451433 = PyInt_FromLong(66451433L); if (unlikely(!__pyx_int_66451433)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_84338306 = PyInt_FromLong(84338306L); if (unlikely(!__pyx_int_84338306)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_99625953 = PyInt_FromLong(99625953L); if (unlikely(!__pyx_int_99625953)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_123419394 = PyInt_FromLong(123419394L); if (unlikely(!__pyx_int_123419394)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_125568891 = PyInt_FromLong(125568891L); if (unlikely(!__pyx_int_125568891)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_169093275 = PyInt_FromLong(169093275L); if (unlikely(!__pyx_int_169093275)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_171613889 = PyInt_FromLong(171613889L); if (unlikely(!__pyx_int_171613889)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_173673827 = PyInt_FromLong(173673827L); if (unlikely(!__pyx_int_173673827)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_192493205 = PyInt_FromLong(192493205L); if (unlikely(!__pyx_int_192493205)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_210464433 = PyInt_FromLong(210464433L); if (unlikely(!__pyx_int_210464433)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_230645316 = PyInt_FromLong(230645316L); if (unlikely(!__pyx_int_230645316)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_232881363 = PyInt_FromLong(232881363L); if (unlikely(!__pyx_int_232881363)) __PYX_ERR(0, 1, __pyx_L1_error) + 
__pyx_int_255484337 = PyInt_FromLong(255484337L); if (unlikely(!__pyx_int_255484337)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in = ((PyObject*)Py_None); Py_INCREF(Py_None); + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) < 0) __PYX_ERR(0, 23, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_PyDBAdditionalThreadInfo, (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) < 0) __PYX_ERR(0, 23, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) < 0) __PYX_ERR(0, 23, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = &__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj) < 0) __PYX_ERR(0, 255, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj.tp_getattro == PyObject_GenericGetAttr)) { 
+ __pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_TryExceptContainerObj, (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj) < 0) __PYX_ERR(0, 255, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj) < 0) __PYX_ERR(0, 255, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj = &__pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj; + __pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame = &__pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame._should_stop_on_exception = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__should_stop_on_exception; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame._handle_exception = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__handle_exception; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame.get_func_name = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_get_func_name; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame._show_return_values = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__show_return_values; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame._remove_return_values = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__remove_return_values; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame._get_unfiltered_back_frame = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__get_unfiltered_back_frame; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame._is_same_frame = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame__is_same_frame; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame.trace_dispatch = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *, int __pyx_skip_dispatch))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 273, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (__Pyx_SetVtable(__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_dict, __pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 273, __pyx_L1_error) + 
if (PyObject_SetAttr(__pyx_m, __pyx_n_s_PyDBFrame, (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 273, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 273, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame = &__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper) < 0) __PYX_ERR(0, 1434, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_SafeCallWrapper, (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper) < 0) __PYX_ERR(0, 1434, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper) < 0) __PYX_ERR(0, 1434, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper = &__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions) < 0) __PYX_ERR(0, 1590, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_TopLevelThreadTracerOnlyUnhandle, (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions) < 0) __PYX_ERR(0, 1590, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions) < 0) __PYX_ERR(0, 1590, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions = &__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame) < 0) __PYX_ERR(0, 1620, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_TopLevelThreadTracerNoBackFrame, (PyObject 
*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame) < 0) __PYX_ERR(0, 1620, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame) < 0) __PYX_ERR(0, 1620, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame = &__pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer) < 0) __PYX_ERR(0, 1695, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer.tp_dictoffset && __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #if CYTHON_UPDATE_DESCRIPTOR_DOC + { + PyObject *wrapper = PyObject_GetAttrString((PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer, "__call__"); if (unlikely(!wrapper)) __PYX_ERR(0, 1695, __pyx_L1_error) + if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) { + __pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__ = *((PyWrapperDescrObject *)wrapper)->d_base; + __pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__.doc = __pyx_doc_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__; + ((PyWrapperDescrObject *)wrapper)->d_base = &__pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__; + } + } + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_ThreadTracer, (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer) < 0) __PYX_ERR(0, 1695, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer) < 0) __PYX_ERR(0, 1695, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer = &__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(3, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initpydevd_cython(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initpydevd_cython(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_pydevd_cython(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_pydevd_cython(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_pydevd_cython(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = 
NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module 'pydevd_cython' has already been imported. Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_pydevd_cython(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("pydevd_cython", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_b); + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_cython_runtime); + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main__pydevd_bundle__pydevd_cython) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "_pydevd_bundle.pydevd_cython")) { + if (unlikely(PyDict_SetItemString(modules, "_pydevd_bundle.pydevd_cython", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "_pydevd_bundle/pydevd_cython.pyx":7 + * # DO NOT edit manually! + * # DO NOT edit manually! + * from _pydevd_bundle.pydevd_constants import (STATE_RUN, PYTHON_SUSPEND, SUPPORT_GEVENT, ForkSafeLock, # <<<<<<<<<<<<<< + * _current_frames) + * from _pydev_bundle import pydev_log + */ + __pyx_t_1 = PyList_New(5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_STATE_RUN); + __Pyx_GIVEREF(__pyx_n_s_STATE_RUN); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_STATE_RUN); + __Pyx_INCREF(__pyx_n_s_PYTHON_SUSPEND); + __Pyx_GIVEREF(__pyx_n_s_PYTHON_SUSPEND); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_PYTHON_SUSPEND); + __Pyx_INCREF(__pyx_n_s_SUPPORT_GEVENT); + __Pyx_GIVEREF(__pyx_n_s_SUPPORT_GEVENT); + PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_SUPPORT_GEVENT); + __Pyx_INCREF(__pyx_n_s_ForkSafeLock); + __Pyx_GIVEREF(__pyx_n_s_ForkSafeLock); + PyList_SET_ITEM(__pyx_t_1, 3, __pyx_n_s_ForkSafeLock); + __Pyx_INCREF(__pyx_n_s_current_frames); + __Pyx_GIVEREF(__pyx_n_s_current_frames); + PyList_SET_ITEM(__pyx_t_1, 4, __pyx_n_s_current_frames); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_STATE_RUN); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_STATE_RUN, __pyx_t_1) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_PYTHON_SUSPEND); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_PYTHON_SUSPEND, __pyx_t_1) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = 
__Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_SUPPORT_GEVENT); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_SUPPORT_GEVENT, __pyx_t_1) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ForkSafeLock); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ForkSafeLock, __pyx_t_1) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_current_frames); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_current_frames, __pyx_t_1) < 0) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":9 + * from _pydevd_bundle.pydevd_constants import (STATE_RUN, PYTHON_SUSPEND, SUPPORT_GEVENT, ForkSafeLock, + * _current_frames) + * from _pydev_bundle import pydev_log # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * pydev_log.debug("Using Cython speedups") + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_pydev_log); + __Pyx_GIVEREF(__pyx_n_s_pydev_log); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_pydev_log); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydev_bundle, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydev_log, __pyx_t_2) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":11 + * from _pydev_bundle import pydev_log + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * pydev_log.debug("Using Cython speedups") # <<<<<<<<<<<<<< + * # ELSE + * # from _pydevd_bundle.pydevd_frame import PyDBFrame + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_debug); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":16 + * # ENDIF + * + * version = 11 # <<<<<<<<<<<<<< + * + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_version, __pyx_int_11) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":142 + * + * + * _set_additional_thread_info_lock = ForkSafeLock() # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ForkSafeLock); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 142, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 142, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_set_additional_thread_info_lock, __pyx_t_2) < 0) __PYX_ERR(0, 142, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":145 + * + * + * def set_additional_thread_info(thread): # <<<<<<<<<<<<<< + * try: + * additional_info = thread.additional_info + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_1set_additional_thread_info, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_set_additional_thread_info, __pyx_t_2) < 0) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":160 + * + * return additional_info + * import linecache # <<<<<<<<<<<<<< + * import os.path + * import re + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_linecache, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_linecache, __pyx_t_2) < 0) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":161 + * return additional_info + * import linecache + * import os.path # <<<<<<<<<<<<<< + * import re + * + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_os_path, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_os, __pyx_t_2) < 0) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":162 + * import linecache + * import os.path + * import re # <<<<<<<<<<<<<< + * + * from _pydev_bundle import pydev_log + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_re, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_re, __pyx_t_2) < 0) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":164 + * import re + * + * from _pydev_bundle import pydev_log # <<<<<<<<<<<<<< + * from _pydevd_bundle import pydevd_dont_trace + * from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_pydev_log); + __Pyx_GIVEREF(__pyx_n_s_pydev_log); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_pydev_log); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydev_bundle, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydev_log, __pyx_t_2) < 0) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":165 + * + * from _pydev_bundle import pydev_log + * from _pydevd_bundle import pydevd_dont_trace # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, + * EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED, PYDEVD_IPYTHON_CONTEXT) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 165, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_pydevd_dont_trace); + __Pyx_GIVEREF(__pyx_n_s_pydevd_dont_trace); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_pydevd_dont_trace); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 165, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 165, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydevd_dont_trace, __pyx_t_1) < 0) __PYX_ERR(0, 165, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":166 + * from _pydev_bundle import pydev_log + * from _pydevd_bundle import pydevd_dont_trace + * from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, # <<<<<<<<<<<<<< + * EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED, PYDEVD_IPYTHON_CONTEXT) + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace + */ + __pyx_t_2 = PyList_New(5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_RETURN_VALUES_DICT); + __Pyx_GIVEREF(__pyx_n_s_RETURN_VALUES_DICT); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_RETURN_VALUES_DICT); + __Pyx_INCREF(__pyx_n_s_NO_FTRACE); + __Pyx_GIVEREF(__pyx_n_s_NO_FTRACE); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_NO_FTRACE); + __Pyx_INCREF(__pyx_n_s_EXCEPTION_TYPE_HANDLED); + __Pyx_GIVEREF(__pyx_n_s_EXCEPTION_TYPE_HANDLED); + PyList_SET_ITEM(__pyx_t_2, 2, __pyx_n_s_EXCEPTION_TYPE_HANDLED); + __Pyx_INCREF(__pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED); + __Pyx_GIVEREF(__pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED); + PyList_SET_ITEM(__pyx_t_2, 3, __pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED); + __Pyx_INCREF(__pyx_n_s_PYDEVD_IPYTHON_CONTEXT); + __Pyx_GIVEREF(__pyx_n_s_PYDEVD_IPYTHON_CONTEXT); + PyList_SET_ITEM(__pyx_t_2, 4, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_RETURN_VALUES_DICT, __pyx_t_2) < 0) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NO_FTRACE, __pyx_t_2) < 0) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_EXCEPTION_TYPE_HANDLED); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_EXCEPTION_TYPE_HANDLED, __pyx_t_2) < 0) __PYX_ERR(0, 167, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED, __pyx_t_2) < 0) __PYX_ERR(0, 167, 
__pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_PYDEVD_IPYTHON_CONTEXT, __pyx_t_2) < 0) __PYX_ERR(0, 167, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":168 + * from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, + * EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED, PYDEVD_IPYTHON_CONTEXT) + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + */ + __pyx_t_1 = PyList_New(4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_add_exception_to_frame); + __Pyx_GIVEREF(__pyx_n_s_add_exception_to_frame); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_add_exception_to_frame); + __Pyx_INCREF(__pyx_n_s_just_raised); + __Pyx_GIVEREF(__pyx_n_s_just_raised); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_just_raised); + __Pyx_INCREF(__pyx_n_s_remove_exception_from_frame); + __Pyx_GIVEREF(__pyx_n_s_remove_exception_from_frame); + PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_remove_exception_from_frame); + __Pyx_INCREF(__pyx_n_s_ignore_exception_trace); + __Pyx_GIVEREF(__pyx_n_s_ignore_exception_trace); + PyList_SET_ITEM(__pyx_t_1, 3, __pyx_n_s_ignore_exception_trace); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_frame_util, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_add_exception_to_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_add_exception_to_frame, __pyx_t_1) < 0) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_just_raised); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_just_raised, __pyx_t_1) < 0) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_remove_exception_from_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_remove_exception_from_frame, __pyx_t_1) < 0) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ignore_exception_trace); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ignore_exception_trace, __pyx_t_1) < 0) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":169 + * EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED, PYDEVD_IPYTHON_CONTEXT) + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace + * from _pydevd_bundle.pydevd_utils import 
get_clsname_for_code # <<<<<<<<<<<<<< + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 169, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_get_clsname_for_code); + __Pyx_GIVEREF(__pyx_n_s_get_clsname_for_code); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_get_clsname_for_code); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_utils, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 169, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_get_clsname_for_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 169, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_clsname_for_code, __pyx_t_2) < 0) __PYX_ERR(0, 169, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":170 + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK + * try: + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 170, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_GIVEREF(__pyx_n_s_get_abs_path_real_path_and_base); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_get_abs_path_real_path_and_base); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_file_utils, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 170, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 170, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_abs_path_real_path_and_base, __pyx_t_1) < 0) __PYX_ERR(0, 170, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":171 + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK # <<<<<<<<<<<<<< + * try: + * from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset + */ + __pyx_t_2 = PyList_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 171, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_constant_to_str); + __Pyx_GIVEREF(__pyx_n_s_constant_to_str); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_constant_to_str); + __Pyx_INCREF(__pyx_n_s_CMD_SET_FUNCTION_BREAK); + __Pyx_GIVEREF(__pyx_n_s_CMD_SET_FUNCTION_BREAK); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_CMD_SET_FUNCTION_BREAK); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_comm_const, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 171, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_constant_to_str); if (unlikely(!__pyx_t_2)) 
__PYX_ERR(0, 171, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_constant_to_str, __pyx_t_2) < 0) __PYX_ERR(0, 171, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_CMD_SET_FUNCTION_BREAK); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 171, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_SET_FUNCTION_BREAK, __pyx_t_2) < 0) __PYX_ERR(0, 171, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":172 + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset + * except ImportError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_3, &__pyx_t_4, &__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_5); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":173 + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK + * try: + * from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset # <<<<<<<<<<<<<< + * except ImportError: + * + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 173, __pyx_L2_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_get_smart_step_into_variant_from); + __Pyx_GIVEREF(__pyx_n_s_get_smart_step_into_variant_from); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_get_smart_step_into_variant_from); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_bytecode_u, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 173, __pyx_L2_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_smart_step_into_variant_from); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 173, __pyx_L2_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_smart_step_into_variant_from, __pyx_t_1) < 0) __PYX_ERR(0, 173, __pyx_L2_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":172 + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset + * except ImportError: + */ + } + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L7_try_end; + __pyx_L2_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":174 + * try: + * from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset + * except ImportError: # <<<<<<<<<<<<<< + * + * def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): + */ + __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_ImportError); + if (__pyx_t_6) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 174, 
__pyx_L4_except_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":176 + * except ImportError: + * + * def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): # <<<<<<<<<<<<<< + * return None + * + */ + __pyx_t_8 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_3get_smart_step_into_variant_from_frame_offset, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 176, __pyx_L4_except_error) + __Pyx_GOTREF(__pyx_t_8); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_smart_step_into_variant_from, __pyx_t_8) < 0) __PYX_ERR(0, 176, __pyx_L4_except_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L3_exception_handled; + } + goto __pyx_L4_except_error; + __pyx_L4_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":172 + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset + * except ImportError: + */ + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); + goto __pyx_L1_error; + __pyx_L3_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); + __pyx_L7_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":196 + * # ENDIF + * + * basename = os.path.basename # <<<<<<<<<<<<<< + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_os); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_path); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_basename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_basename, __pyx_t_7) < 0) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":198 + * basename = os.path.basename + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') # <<<<<<<<<<<<<< + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_re); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 198, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_compile); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 198, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple__20, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 198, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_IGNORE_EXCEPTION_TAG, __pyx_t_7) < 0) __PYX_ERR(0, 198, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* 
"_pydevd_bundle/pydevd_cython.pyx":199 + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') # <<<<<<<<<<<<<< + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DEBUG_START, __pyx_tuple__21) < 0) __PYX_ERR(0, 199, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":200 + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') # <<<<<<<<<<<<<< + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DEBUG_START_PY3K, __pyx_tuple__22) < 0) __PYX_ERR(0, 200, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":201 + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' # <<<<<<<<<<<<<< + * + * import dis + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TRACE_PROPERTY, __pyx_kp_s_pydevd_traceproperty_py) < 0) __PYX_ERR(0, 201, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":203 + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + * + * import dis # <<<<<<<<<<<<<< + * + * try: + */ + __pyx_t_7 = __Pyx_Import(__pyx_n_s_dis, 0, -1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_dis, __pyx_t_7) < 0) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":205 + * import dis + * + * try: # <<<<<<<<<<<<<< + * StopAsyncIteration + * except NameError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_4, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":206 + * + * try: + * StopAsyncIteration # <<<<<<<<<<<<<< + * except NameError: + * StopAsyncIteration = StopIteration + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_StopAsyncIteration); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 206, __pyx_L10_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":205 + * import dis + * + * try: # <<<<<<<<<<<<<< + * StopAsyncIteration + * except NameError: + */ + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L15_try_end; + __pyx_L10_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":207 + * try: + * StopAsyncIteration + * except NameError: # <<<<<<<<<<<<<< + * StopAsyncIteration = StopIteration + * + */ + __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_NameError); + if (__pyx_t_6) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_1, &__pyx_t_2) < 0) __PYX_ERR(0, 207, __pyx_L12_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_2); + + /* "_pydevd_bundle/pydevd_cython.pyx":208 + * StopAsyncIteration + * except NameError: + * StopAsyncIteration = StopIteration # <<<<<<<<<<<<<< + * + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_StopAsyncIteration, 
__pyx_builtin_StopIteration) < 0) __PYX_ERR(0, 208, __pyx_L12_except_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L11_exception_handled; + } + goto __pyx_L12_except_error; + __pyx_L12_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":205 + * import dis + * + * try: # <<<<<<<<<<<<<< + * StopAsyncIteration + * except NameError: + */ + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_4, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L11_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_4, __pyx_t_3); + __pyx_L15_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":286 + * # Same thing in the main debugger but only considering the file contents, while the one in the main debugger + * # considers the user input (so, the actual result must be a join of both). + * filename_to_lines_where_exceptions_are_ignored = {} # <<<<<<<<<<<<<< + * filename_to_stat_info = {} + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 286, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame->tp_dict, __pyx_n_s_filename_to_lines_where_exceptio, __pyx_t_2) < 0) __PYX_ERR(0, 286, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame); + + /* "_pydevd_bundle/pydevd_cython.pyx":287 + * # considers the user input (so, the actual result must be a join of both). + * filename_to_lines_where_exceptions_are_ignored = {} + * filename_to_stat_info = {} # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 287, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame->tp_dict, __pyx_n_s_filename_to_stat_info, __pyx_t_2) < 0) __PYX_ERR(0, 287, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame); + + /* "_pydevd_bundle/pydevd_cython.pyx":1387 + * + * # end trace_dispatch + * from _pydev_bundle.pydev_is_thread_alive import is_thread_alive # <<<<<<<<<<<<<< + * from _pydev_bundle.pydev_log import exception as pydev_log_exception + * from _pydev_bundle._pydev_saved_modules import threading + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1387, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_is_thread_alive); + __Pyx_GIVEREF(__pyx_n_s_is_thread_alive); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_is_thread_alive); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydev_bundle_pydev_is_thread_al, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1387, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_is_thread_alive); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1387, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_thread_alive, __pyx_t_2) < 0) __PYX_ERR(0, 1387, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1388 + * # end trace_dispatch + * from 
_pydev_bundle.pydev_is_thread_alive import is_thread_alive + * from _pydev_bundle.pydev_log import exception as pydev_log_exception # <<<<<<<<<<<<<< + * from _pydev_bundle._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_constants import (get_current_thread_id, NO_FTRACE, + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1388, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_exception); + __Pyx_GIVEREF(__pyx_n_s_exception); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_exception); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydev_bundle_pydev_log, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1388, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1388, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydev_log_exception, __pyx_t_1) < 0) __PYX_ERR(0, 1388, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1389 + * from _pydev_bundle.pydev_is_thread_alive import is_thread_alive + * from _pydev_bundle.pydev_log import exception as pydev_log_exception + * from _pydev_bundle._pydev_saved_modules import threading # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_constants import (get_current_thread_id, NO_FTRACE, + * USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, ForkSafeLock) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1389, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_threading); + __Pyx_GIVEREF(__pyx_n_s_threading); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_threading); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydev_bundle__pydev_saved_modul, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1389, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_threading); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1389, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_threading, __pyx_t_2) < 0) __PYX_ERR(0, 1389, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1390 + * from _pydev_bundle.pydev_log import exception as pydev_log_exception + * from _pydev_bundle._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_constants import (get_current_thread_id, NO_FTRACE, # <<<<<<<<<<<<<< + * USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, ForkSafeLock) + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + */ + __pyx_t_1 = PyList_New(4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_get_current_thread_id); + __Pyx_GIVEREF(__pyx_n_s_get_current_thread_id); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_get_current_thread_id); + __Pyx_INCREF(__pyx_n_s_NO_FTRACE); + __Pyx_GIVEREF(__pyx_n_s_NO_FTRACE); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_NO_FTRACE); + __Pyx_INCREF(__pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA); + __Pyx_GIVEREF(__pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA); + PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA); + __Pyx_INCREF(__pyx_n_s_ForkSafeLock); + __Pyx_GIVEREF(__pyx_n_s_ForkSafeLock); + PyList_SET_ITEM(__pyx_t_1, 3, __pyx_n_s_ForkSafeLock); + __pyx_t_2 = 
__Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_current_thread_id); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_current_thread_id, __pyx_t_1) < 0) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NO_FTRACE, __pyx_t_1) < 0) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA, __pyx_t_1) < 0) __PYX_ERR(0, 1391, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ForkSafeLock); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ForkSafeLock, __pyx_t_1) < 0) __PYX_ERR(0, 1391, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1392 + * from _pydevd_bundle.pydevd_constants import (get_current_thread_id, NO_FTRACE, + * USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, ForkSafeLock) + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_2 = PyList_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_GIVEREF(__pyx_n_s_get_abs_path_real_path_and_base); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_INCREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __Pyx_GIVEREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_file_utils, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_abs_path_real_path_and_base, __pyx_t_2) < 0) __PYX_ERR(0, 1392, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_t_2) < 0) __PYX_ERR(0, 1392, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1415 + * # - Breakpoints are changed + * # It can be used when running regularly (without step over/step in/step return) + * global_cache_skips = {} # <<<<<<<<<<<<<< + * global_cache_frame_skips = 
{} + * + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_global_cache_skips, __pyx_t_1) < 0) __PYX_ERR(0, 1415, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1416 + * # It can be used when running regularly (without step over/step in/step return) + * global_cache_skips = {} + * global_cache_frame_skips = {} # <<<<<<<<<<<<<< + * + * _global_notify_skipped_step_in = False + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1416, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_global_cache_frame_skips, __pyx_t_1) < 0) __PYX_ERR(0, 1416, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1418 + * global_cache_frame_skips = {} + * + * _global_notify_skipped_step_in = False # <<<<<<<<<<<<<< + * _global_notify_skipped_step_in_lock = ForkSafeLock() + * + */ + __Pyx_INCREF(Py_False); + __Pyx_XGOTREF(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in); + __Pyx_DECREF_SET(__pyx_v_14_pydevd_bundle_13pydevd_cython__global_notify_skipped_step_in, ((PyObject*)Py_False)); + __Pyx_GIVEREF(Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":1419 + * + * _global_notify_skipped_step_in = False + * _global_notify_skipped_step_in_lock = ForkSafeLock() # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ForkSafeLock); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_global_notify_skipped_step_in_l, __pyx_t_2) < 0) __PYX_ERR(0, 1419, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1422 + * + * + * def notify_skipped_step_in_because_of_filters(py_db, frame): # <<<<<<<<<<<<<< + * global _global_notify_skipped_step_in + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_5notify_skipped_step_in_because_of_filters, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1422, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_notify_skipped_step_in_because_o, __pyx_t_2) < 0) __PYX_ERR(0, 1422, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1452 + * + * + * def fix_top_level_trace_and_get_trace_func(py_db, frame): # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef str filename; + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_7fix_top_level_trace_and_get_trace_func, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1452, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_fix_top_level_trace_and_get_trac, __pyx_t_2) < 0) __PYX_ERR(0, 1452, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1580 + * + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + * if thread_trace_func is None: + */ + __pyx_t_2 = 
PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_9trace_dispatch, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1580, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_trace_dispatch, __pyx_t_2) < 0) __PYX_ERR(0, 1580, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1854 + * + * + * if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: # <<<<<<<<<<<<<< + * # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + * # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_USE_CUSTOM_SYS_CURRENT_FRAMES_MA); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1854, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1854, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1862 + * # + * # See: https://github.com/IronLanguages/main/issues/1630 + * from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame # <<<<<<<<<<<<<< + * + * _original_call = ThreadTracer.__call__ + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1862, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_constructed_tid_to_last_frame); + __Pyx_GIVEREF(__pyx_n_s_constructed_tid_to_last_frame); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_constructed_tid_to_last_frame); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1862, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_constructed_tid_to_last_frame); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1862, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_constructed_tid_to_last_frame, __pyx_t_2) < 0) __PYX_ERR(0, 1862, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1864 + * from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame + * + * _original_call = ThreadTracer.__call__ # <<<<<<<<<<<<<< + * + * def __call__(self, frame, event, arg): + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_n_s_call_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1864, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_original_call, __pyx_t_1) < 0) __PYX_ERR(0, 1864, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1866 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * constructed_tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_11__call__, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1866, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_call_2, __pyx_t_1) < 0) __PYX_ERR(0, 1866, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1870 + * return _original_call(self, frame, event, arg) + * + * 
ThreadTracer.__call__ = __call__ # <<<<<<<<<<<<<< + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_call_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1870, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_PyObject_SetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_n_s_call_2, __pyx_t_1) < 0) __PYX_ERR(0, 1870, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1854 + * + * + * if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: # <<<<<<<<<<<<<< + * # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + * # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + */ + } + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_13__pyx_unpickle_PyDBAdditionalThreadInfo, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_PyDBAdditionalThr, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.conditional_breakpoint_exception = __pyx_state[0]; __pyx_result.is_tracing = __pyx_state[1]; __pyx_result.pydev_call_from_jinja2 = __pyx_state[2]; __pyx_result.pydev_call_inside_jinja2 = __pyx_state[3]; __pyx_result.pydev_django_resolve_frame = __pyx_state[4]; __pyx_result.pydev_func_name = __pyx_state[5]; __pyx_result.pydev_message = __pyx_state[6]; __pyx_result.pydev_next_line = __pyx_state[7]; __pyx_result.pydev_notify_kill = __pyx_state[8]; __pyx_result.pydev_original_step_cmd = __pyx_state[9]; __pyx_result.pydev_smart_child_offset = __pyx_state[10]; __pyx_result.pydev_smart_parent_offset = __pyx_state[11]; __pyx_result.pydev_smart_step_into_variants = __pyx_state[12]; __pyx_result.pydev_smart_step_stop = __pyx_state[13]; __pyx_result.pydev_state = __pyx_state[14]; __pyx_result.pydev_step_cmd = __pyx_state[15]; __pyx_result.pydev_step_stop = __pyx_state[16]; __pyx_result.pydev_use_scoped_step_frame = __pyx_state[17]; __pyx_result.step_in_initial_location = __pyx_state[18]; __pyx_result.suspend_type = __pyx_state[19]; __pyx_result.suspended_at_unhandled = __pyx_state[20]; __pyx_result.target_id_to_smart_step_into_variant = __pyx_state[21]; __pyx_result.thread_tracer = __pyx_state[22]; __pyx_result.top_level_thread_tracer_no_back_frames = __pyx_state[23]; __pyx_result.top_level_thread_tracer_unhandled = __pyx_state[24]; __pyx_result.trace_suspend_type = __pyx_state[25] + * if len(__pyx_state) > 26 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_15__pyx_unpickle__TryExceptContainerObj, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle__TryExceptContain, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree 
fragment)":1 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBFrame, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_PyDBFrame, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle_PyDBFrame__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0]; __pyx_result.exc_info = __pyx_state[1]; __pyx_result.should_skip = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_SafeCallWrapper, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_SafeCallWrapper, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_TopLevelThreadTra, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_TopLevelThreadTracerOnlyUnhandledExceptions__set_state(TopLevelThreadTracerOnlyUnhandledExceptions __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._args = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_TopLevelThreadTracerNoBackFrame, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_TopLevelThreadTra_2, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_25__pyx_unpickle_ThreadTracer, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_ThreadTracer, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1 + * from 
__future__ import print_function # <<<<<<<<<<<<<< + * + * # Important: Autogenerated file. + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init _pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init _pydevd_bundle.pydevd_cython"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a 
specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* 
__Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallNoArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (__Pyx_PyFastCFunction_Check(func)) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if 
CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, 
*tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = 
tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* PyObjectSetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_setattro)) + return tp->tp_setattro(obj, attr_name, value); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_setattr)) + return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); +#endif + return PyObject_SetAttr(obj, attr_name, value); +} +#endif + +/* None */ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) { + PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname); +} + +/* pyfrozenset_new */ +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it) { + if (it) { + PyObject* result; +#if CYTHON_COMPILING_IN_PYPY + PyObject* args; + args = PyTuple_Pack(1, it); + if (unlikely(!args)) + return NULL; + result = PyObject_Call((PyObject*)&PyFrozenSet_Type, args, NULL); + Py_DECREF(args); + return result; +#else + if (PyFrozenSet_CheckExact(it)) { + Py_INCREF(it); + return it; + } + result = PyFrozenSet_New(it); + if (unlikely(!result)) + return NULL; + if ((PY_VERSION_HEX >= 0x031000A1) || likely(PySet_GET_SIZE(result))) + return result; + Py_DECREF(result); +#endif + } +#if CYTHON_USE_TYPE_SLOTS + return PyFrozenSet_Type.tp_new(&PyFrozenSet_Type, __pyx_empty_tuple, NULL); +#else + return PyObject_Call((PyObject*)&PyFrozenSet_Type, __pyx_empty_tuple, NULL); +#endif +} + +/* PySetContains */ +static int __Pyx_PySet_ContainsUnhashable(PyObject *set, PyObject *key) { + int result = -1; + if (PySet_Check(key) && PyErr_ExceptionMatches(PyExc_TypeError)) { + PyObject *tmpkey; + PyErr_Clear(); + tmpkey = __Pyx_PyFrozenSet_New(key); + if (tmpkey != NULL) { + result = PySet_Contains(set, tmpkey); + Py_DECREF(tmpkey); + } + } + return result; +} +static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq) { + int result = PySet_Contains(set, key); + if (unlikely(result < 0)) { + result = __Pyx_PySet_ContainsUnhashable(set, key); + } + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + 
#else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* RaiseTooManyValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? 
"" : "s"); +} + +/* IterFinish */ +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* UnpackUnboundCMethod */ +static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); + } +#endif + return 0; +} + +/* CallUnboundCMethod1 */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) { + if (PY_VERSION_HEX >= 0x030700A0) { + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + } else { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && 
unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* CallUnboundCMethod2 */ +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) { + if (likely(cfunc->func)) { + PyObject *args[2] = {arg1, arg2}; + if (cfunc->flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + #if PY_VERSION_HEX >= 0x030700A0 + if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS)) + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(3); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 1, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 2, arg2); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(3, self, arg1, arg2); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* dict_getitem_default */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { + PyObject* value; +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + value = default_value; + } + Py_INCREF(value); + if ((1)); +#else + if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + value = default_value; + } + Py_INCREF(value); + } +#endif + else { + if (default_value == Py_None) + value = 
__Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_get, d, key); + else + value = __Pyx_CallUnboundCMethod2(&__pyx_umethod_PyDict_Type_get, d, key, default_value); + } + return value; +} + +/* SwapException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* RaiseNoneIterError */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/* PyIntBinop */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AndObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) { + (void)inplace; + (void)zerodivision_check; + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long a = PyInt_AS_LONG(op1); + return PyInt_FromLong(a & b); + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + const long b = intval; + long a, x; +#ifdef HAVE_LONG_LONG + const PY_LONG_LONG llb = intval; + PY_LONG_LONG lla, llx; +#endif + const digit* digits = ((PyLongObject*)op1)->ob_digit; + const Py_ssize_t size = Py_SIZE(op1); + if (likely(__Pyx_sst_abs(size) <= 1)) { + a = likely(size) ? 
digits[0] : 0; + if (size == -1) a = -a; + } else { + switch (size) { + case -2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case -3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case -4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + default: return PyLong_Type.tp_as_number->nb_and(op1, op2); + } + } + x = a & b; + return PyLong_FromLong(x); +#ifdef HAVE_LONG_LONG + long_long: + llx = lla & llb; + return PyLong_FromLongLong(llx); +#endif + + + } + #endif + return (inplace ? 
PyNumber_InPlaceAnd : PyNumber_And)(op1, op2); +} +#endif + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (descr != NULL) { + *method = descr; + return 0; + } + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(name)); +#endif + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* UnpackTupleError */ +static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { + if (t == Py_None) { + __Pyx_RaiseNoneNotIterableError(); + } else if (PyTuple_GET_SIZE(t) < index) { + __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); + } else { + __Pyx_RaiseTooManyValuesError(index); + } +} + +/* UnpackTuple2 */ +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { + PyObject *value1 = NULL, *value2 = NULL; +#if CYTHON_COMPILING_IN_PYPY + 
value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; + value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; +#else + value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); + value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); +#endif + if (decref_tuple) { + Py_DECREF(tuple); + } + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +#if CYTHON_COMPILING_IN_PYPY +bad: + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +#endif +} +static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, + int has_known_size, int decref_tuple) { + Py_ssize_t index; + PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; + iternextfunc iternext; + iter = PyObject_GetIter(tuple); + if (unlikely(!iter)) goto bad; + if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } + iternext = Py_TYPE(iter)->tp_iternext; + value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } + value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } + if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; + Py_DECREF(iter); + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +unpacking_failed: + if (!has_known_size && __Pyx_IterFinish() == 0) + __Pyx_RaiseNeedMoreValuesError(index); +bad: + Py_XDECREF(iter); + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +} + +/* dict_iter */ +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_source_is_dict) { + is_dict = is_dict || likely(PyDict_CheckExact(iterable)); + *p_source_is_dict = is_dict; + if (is_dict) { +#if !CYTHON_COMPILING_IN_PYPY + *p_orig_length = PyDict_Size(iterable); + Py_INCREF(iterable); + return iterable; +#elif PY_MAJOR_VERSION >= 3 + static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; + PyObject **pp = NULL; + if (method_name) { + const char *name = PyUnicode_AsUTF8(method_name); + if (strcmp(name, "iteritems") == 0) pp = &py_items; + else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; + else if (strcmp(name, "itervalues") == 0) pp = &py_values; + if (pp) { + if (!*pp) { + *pp = PyUnicode_FromString(name + 4); + if (!*pp) + return NULL; + } + method_name = *pp; + } + } +#endif + } + *p_orig_length = 0; + if (method_name) { + PyObject* iter; + iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); + if (!iterable) + return NULL; +#if !CYTHON_COMPILING_IN_PYPY + if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) + return iterable; +#endif + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + return iter; + } + return PyObject_GetIter(iterable); +} +static CYTHON_INLINE int __Pyx_dict_iter_next( + PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { + PyObject* next_item; +#if !CYTHON_COMPILING_IN_PYPY + if (source_is_dict) { + PyObject *key, *value; + if (unlikely(orig_length != PyDict_Size(iter_obj))) { + PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); + return -1; + } + if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { + return 0; + } + if (pitem) { + PyObject* tuple = PyTuple_New(2); + if (unlikely(!tuple)) { + return -1; + } + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(tuple, 0, key); + 
PyTuple_SET_ITEM(tuple, 1, value); + *pitem = tuple; + } else { + if (pkey) { + Py_INCREF(key); + *pkey = key; + } + if (pvalue) { + Py_INCREF(value); + *pvalue = value; + } + } + return 1; + } else if (PyTuple_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyTuple_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else if (PyList_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyList_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else +#endif + { + next_item = PyIter_Next(iter_obj); + if (unlikely(!next_item)) { + return __Pyx_IterFinish(); + } + } + if (pitem) { + *pitem = next_item; + } else if (pkey && pvalue) { + if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) + return -1; + } else if (pkey) { + *pkey = next_item; + } else { + *pvalue = next_item; + } + return 1; +} + +/* CallUnboundCMethod0 */ +static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) { + PyObject *args, *result = NULL; + if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_ASSUME_SAFE_MACROS + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); +#else + args = PyTuple_Pack(1, self); + if (unlikely(!args)) goto bad; +#endif + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + Py_DECREF(args); +bad: + return result; +} + +/* py_dict_values */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_values, d); + else + return PyDict_Values(d); +} + +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* SliceObject */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, + Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, + int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { +#if CYTHON_USE_TYPE_SLOTS + PyMappingMethods* mp; +#if PY_MAJOR_VERSION < 3 + PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; + if (likely(ms && ms->sq_slice)) { + if (!has_cstart) { + if (_py_start && (*_py_start != Py_None)) { + cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); + if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstart = 0; + } + if (!has_cstop) { + if (_py_stop && (*_py_stop != Py_None)) { + cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); + if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstop = PY_SSIZE_T_MAX; + } + if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { + Py_ssize_t l = ms->sq_length(obj); + if (likely(l >= 0)) { + if (cstop < 0) { + cstop += l; + if (cstop < 0) cstop = 0; + } + if (cstart < 0) { + cstart += l; + if (cstart < 0) cstart = 0; + } + } else { + if 
(!PyErr_ExceptionMatches(PyExc_OverflowError)) + goto bad; + PyErr_Clear(); + } + } + return ms->sq_slice(obj, cstart, cstop); + } +#endif + mp = Py_TYPE(obj)->tp_as_mapping; + if (likely(mp && mp->mp_subscript)) +#endif + { + PyObject* result; + PyObject *py_slice, *py_start, *py_stop; + if (_py_slice) { + py_slice = *_py_slice; + } else { + PyObject* owned_start = NULL; + PyObject* owned_stop = NULL; + if (_py_start) { + py_start = *_py_start; + } else { + if (has_cstart) { + owned_start = py_start = PyInt_FromSsize_t(cstart); + if (unlikely(!py_start)) goto bad; + } else + py_start = Py_None; + } + if (_py_stop) { + py_stop = *_py_stop; + } else { + if (has_cstop) { + owned_stop = py_stop = PyInt_FromSsize_t(cstop); + if (unlikely(!py_stop)) { + Py_XDECREF(owned_start); + goto bad; + } + } else + py_stop = Py_None; + } + py_slice = PySlice_New(py_start, py_stop, Py_None); + Py_XDECREF(owned_start); + Py_XDECREF(owned_stop); + if (unlikely(!py_slice)) goto bad; + } +#if CYTHON_USE_TYPE_SLOTS + result = mp->mp_subscript(obj, py_slice); +#else + result = PyObject_GetItem(obj, py_slice); +#endif + if (!_py_slice) { + Py_DECREF(py_slice); + } + return result; + } + PyErr_Format(PyExc_TypeError, + "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); +bad: + return NULL; +} + +/* PyIntBinop */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) { + (void)inplace; + (void)zerodivision_check; + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long x; + long a = PyInt_AS_LONG(op1); + x = (long)((unsigned long)a + b); + if (likely((x^a) >= 0 || (x^b) >= 0)) + return PyInt_FromLong(x); + return PyLong_Type.tp_as_number->nb_add(op1, op2); + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + const long b = intval; + long a, x; +#ifdef HAVE_LONG_LONG + const PY_LONG_LONG llb = intval; + PY_LONG_LONG lla, llx; +#endif + const digit* digits = ((PyLongObject*)op1)->ob_digit; + const Py_ssize_t size = Py_SIZE(op1); + if (likely(__Pyx_sst_abs(size) <= 1)) { + a = likely(size) ? 
digits[0] : 0; + if (size == -1) a = -a; + } else { + switch (size) { + case -2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case -3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case -4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + default: return PyLong_Type.tp_as_number->nb_add(op1, op2); + } + } + x = a + b; + return PyLong_FromLong(x); +#ifdef HAVE_LONG_LONG + long_long: + llx = lla + llb; + return PyLong_FromLongLong(llx); +#endif + + + } + #endif + if (PyFloat_CheckExact(op1)) { + const long b = intval; + double a = PyFloat_AS_DOUBLE(op1); + double result; + PyFPE_START_PROTECT("add", return NULL) + 
result = ((double)a) + (double)b; + PyFPE_END_PROTECT(result) + return PyFloat_FromDouble(result); + } + return (inplace ? PyNumber_InPlaceAdd : PyNumber_Add)(op1, op2); +} +#endif + +/* PyObjectCallMethod1 */ +static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) { + PyObject *result = __Pyx_PyObject_CallOneArg(method, arg); + Py_DECREF(method); + return result; +} +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { + PyObject *method = NULL, *result; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_Call2Args(method, obj, arg); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) return NULL; + return __Pyx__PyObject_CallMethod1(method, arg); +} + +/* append */ +static CYTHON_INLINE int __Pyx_PyObject_Append(PyObject* L, PyObject* x) { + if (likely(PyList_CheckExact(L))) { + if (unlikely(__Pyx_PyList_Append(L, x) < 0)) return -1; + } else { + PyObject* retval = __Pyx_PyObject_CallMethod1(L, __pyx_n_s_append, x); + if (unlikely(!retval)) + return -1; + Py_DECREF(retval); + } + return 0; +} + +/* SliceTupleAndList */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_crop_slice(Py_ssize_t* _start, Py_ssize_t* _stop, Py_ssize_t* _length) { + Py_ssize_t start = *_start, stop = *_stop, length = *_length; + if (start < 0) { + start += length; + if (start < 0) + start = 0; + } + if (stop < 0) + stop += length; + else if (stop > length) + stop = length; + *_length = stop - start; + *_start = start; + *_stop = stop; +} +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject** CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject* __Pyx_PyList_GetSlice( + PyObject* src, Py_ssize_t start, Py_ssize_t stop) { + PyObject* dest; + Py_ssize_t length = PyList_GET_SIZE(src); + __Pyx_crop_slice(&start, &stop, &length); + if (unlikely(length <= 0)) + return PyList_New(0); + dest = PyList_New(length); + if (unlikely(!dest)) + return NULL; + __Pyx_copy_object_array( + ((PyListObject*)src)->ob_item + start, + ((PyListObject*)dest)->ob_item, + length); + return dest; +} +static CYTHON_INLINE PyObject* __Pyx_PyTuple_GetSlice( + PyObject* src, Py_ssize_t start, Py_ssize_t stop) { + PyObject* dest; + Py_ssize_t length = PyTuple_GET_SIZE(src); + __Pyx_crop_slice(&start, &stop, &length); + if (unlikely(length <= 0)) + return PyTuple_New(0); + dest = PyTuple_New(length); + if (unlikely(!dest)) + return NULL; + __Pyx_copy_object_array( + ((PyTupleObject*)src)->ob_item + start, + ((PyTupleObject*)dest)->ob_item, + length); + return dest; +} +#endif + +/* PyIntCompare */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED long inplace) { + if (op1 == op2) { + Py_RETURN_TRUE; + } + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long a = PyInt_AS_LONG(op1); + if (a == b) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + int unequal; + unsigned long uintval; + Py_ssize_t size = Py_SIZE(op1); + const digit* digits = ((PyLongObject*)op1)->ob_digit; + if (intval == 0) { + if (size == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } else if (intval < 0) { + if (size >= 0) + Py_RETURN_FALSE; + intval = 
-intval; + size = -size; + } else { + if (size <= 0) + Py_RETURN_FALSE; + } + uintval = (unsigned long) intval; +#if PyLong_SHIFT * 4 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 4)) { + unequal = (size != 5) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[4] != ((uintval >> (4 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif +#if PyLong_SHIFT * 3 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 3)) { + unequal = (size != 4) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif +#if PyLong_SHIFT * 2 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 2)) { + unequal = (size != 3) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif +#if PyLong_SHIFT * 1 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 1)) { + unequal = (size != 2) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif + unequal = (size != 1) || (((unsigned long) digits[0]) != (uintval & (unsigned long) PyLong_MASK)); + if (unequal == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } + #endif + if (PyFloat_CheckExact(op1)) { + const long b = intval; + double a = PyFloat_AS_DOUBLE(op1); + if ((double)a == (double)b) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } + return ( + PyObject_RichCompare(op1, op2, Py_EQ)); +} + +/* ObjectGetItem */ +#if CYTHON_USE_TYPE_SLOTS +static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) { + PyObject *runerr; + Py_ssize_t key_value; + PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence; + if (unlikely(!(m && m->sq_item))) { + PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name); + return NULL; + } + key_value = __Pyx_PyIndex_AsSsize_t(index); + if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { + return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); + } + if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { + PyErr_Clear(); + PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name); + } + return NULL; +} +static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) { + PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping; + if (likely(m && m->mp_subscript)) { + return m->mp_subscript(obj, key); + } + return __Pyx_PyObject_GetIndex(obj, key); +} +#endif + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = 
empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == 
PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +} + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_getstate = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + PyObject *getstate = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); +#else + getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); + if (!getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (getstate) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); +#else + object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); + if (!object_getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (object_getstate != getstate) { + goto __PYX_GOOD; + } + } +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = 
PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); + Py_XDECREF(object_getstate); + Py_XDECREF(getstate); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* SetVTable */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); 
+ return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) 
(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) 
((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * 
sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * 
sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); 
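+        /* The sizeof() comparisons in this chain are compile-time constants, so
+           only the branch matching the platform's integer widths survives
+           optimization; the other branches are generated purely for portability. */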
+#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i '9'); + break; + } + if (rt_from_call[i] != ctversion[i]) { + same = 0; + break; + } + } + if (!same) { + char rtversion[5] = {'\0'}; + char message[200]; + for (i=0; i<4; ++i) { + if (rt_from_call[i] == '.') { + if (found_dot) break; + found_dot = 1; + } else if (rt_from_call[i] < '0' || rt_from_call[i] > '9') { + break; + } + rtversion[i] = rt_from_call[i]; + } + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* InitStrings */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + 
PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..988a729 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pxd b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pxd new file mode 100644 index 0000000..51bb495 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pxd @@ -0,0 +1,27 @@ +cdef class PyDBAdditionalThreadInfo: + cdef public int pydev_state + cdef public object pydev_step_stop # Actually, it's a frame or None + cdef public int pydev_original_step_cmd + cdef public int pydev_step_cmd + cdef public bint pydev_notify_kill + cdef public object pydev_smart_step_stop # Actually, it's a frame or None + cdef public bint pydev_django_resolve_frame + cdef public object pydev_call_from_jinja2 + cdef public object pydev_call_inside_jinja2 + cdef public int is_tracing + cdef public tuple conditional_breakpoint_exception + cdef public str pydev_message + cdef public int suspend_type + cdef public int pydev_next_line + cdef public str pydev_func_name + cdef public bint suspended_at_unhandled + cdef public str trace_suspend_type + cdef public object top_level_thread_tracer_no_back_frames + cdef public object top_level_thread_tracer_unhandled + cdef public object thread_tracer + cdef public object step_in_initial_location + cdef public int pydev_smart_parent_offset + cdef public int pydev_smart_child_offset + cdef public tuple pydev_smart_step_into_variants + cdef public dict target_id_to_smart_step_into_variant + cdef public bint pydev_use_scoped_step_frame diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pyx b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pyx new file mode 100644 index 0000000..da21f49 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pyx @@ -0,0 +1,1870 @@ +from __future__ import print_function + +# Important: Autogenerated file. + +# DO NOT edit manually! +# DO NOT edit manually! 
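+# This is the Cython source behind the Cython-generated C code earlier in this
+# diff and the compiled pydevd_cython .so added just above.  Throughout the file,
+# "# IFDEF CYTHON" / "# ELSE" / "# ENDIF" markers separate the Cython-specific
+# lines from the pure-Python equivalents kept alongside as comments.
+#
+# A minimal sketch of how a .pyx like this is usually turned into a shared
+# object (an illustration only; the script name and options below are
+# assumptions, not part of debugpy's own build):
+#
+#     # build_sketch.py (hypothetical)
+#     from setuptools import setup
+#     from Cython.Build import cythonize
+#
+#     setup(ext_modules=cythonize("pydevd_cython.pyx", language_level=2))
+#
+# Running "python build_sketch.py build_ext --inplace" would then emit the
+# platform-specific .so next to the source.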
+from _pydevd_bundle.pydevd_constants import (STATE_RUN, PYTHON_SUSPEND, SUPPORT_GEVENT, ForkSafeLock, + _current_frames) +from _pydev_bundle import pydev_log +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +pydev_log.debug("Using Cython speedups") +# ELSE +# from _pydevd_bundle.pydevd_frame import PyDBFrame +# ENDIF + +version = 11 + + +#======================================================================================================================= +# PyDBAdditionalThreadInfo +#======================================================================================================================= +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class PyDBAdditionalThreadInfo: +# ELSE +# class PyDBAdditionalThreadInfo(object): +# ENDIF + + # Note: the params in cython are declared in pydevd_cython.pxd. + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + # ELSE +# __slots__ = [ +# 'pydev_state', +# 'pydev_step_stop', +# 'pydev_original_step_cmd', +# 'pydev_step_cmd', +# 'pydev_notify_kill', +# 'pydev_django_resolve_frame', +# 'pydev_call_from_jinja2', +# 'pydev_call_inside_jinja2', +# 'is_tracing', +# 'conditional_breakpoint_exception', +# 'pydev_message', +# 'suspend_type', +# 'pydev_next_line', +# 'pydev_func_name', +# 'suspended_at_unhandled', +# 'trace_suspend_type', +# 'top_level_thread_tracer_no_back_frames', +# 'top_level_thread_tracer_unhandled', +# 'thread_tracer', +# 'step_in_initial_location', +# +# # Used for CMD_SMART_STEP_INTO (to know which smart step into variant to use) +# 'pydev_smart_parent_offset', +# 'pydev_smart_child_offset', +# +# # Used for CMD_SMART_STEP_INTO (list[_pydevd_bundle.pydevd_bytecode_utils.Variant]) +# # Filled when the cmd_get_smart_step_into_variants is requested (so, this is a copy +# # of the last request for a given thread and pydev_smart_parent_offset/pydev_smart_child_offset relies on it). +# 'pydev_smart_step_into_variants', +# 'target_id_to_smart_step_into_variant', +# +# 'pydev_use_scoped_step_frame', +# ] + # ENDIF + + def __init__(self): + self.pydev_state = STATE_RUN # STATE_RUN or STATE_SUSPEND + self.pydev_step_stop = None + + # Note: we have `pydev_original_step_cmd` and `pydev_step_cmd` because the original is to + # say the action that started it and the other is to say what's the current tracing behavior + # (because it's possible that we start with a step over but may have to switch to a + # different step strategy -- for instance, if a step over is done and we return the current + # method the strategy is changed to a step in). + + self.pydev_original_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + + self.pydev_notify_kill = False + self.pydev_django_resolve_frame = False + self.pydev_call_from_jinja2 = None + self.pydev_call_inside_jinja2 = None + self.is_tracing = 0 + self.conditional_breakpoint_exception = None + self.pydev_message = '' + self.suspend_type = PYTHON_SUSPEND + self.pydev_next_line = -1 + self.pydev_func_name = '.invalid.' 
# Must match the type in cython + self.suspended_at_unhandled = False + self.trace_suspend_type = 'trace' # 'trace' or 'frame_eval' + self.top_level_thread_tracer_no_back_frames = [] + self.top_level_thread_tracer_unhandled = None + self.thread_tracer = None + self.step_in_initial_location = None + self.pydev_smart_parent_offset = -1 + self.pydev_smart_child_offset = -1 + self.pydev_smart_step_into_variants = () + self.target_id_to_smart_step_into_variant = {} + + # Flag to indicate ipython use-case where each line will be executed as a call/line/return + # in a new new frame but in practice we want to consider each new frame as if it was all + # part of the same frame. + # + # In practice this means that a step over shouldn't revert to a step in and we need some + # special logic to know when we should stop in a step over as we need to consider 2 + # different frames as being equal if they're logically the continuation of a frame + # being executed by ipython line by line. + # + # See: https://github.com/microsoft/debugpy/issues/869#issuecomment-1132141003 + self.pydev_use_scoped_step_frame = False + + def get_topmost_frame(self, thread): + ''' + Gets the topmost frame for the given thread. Note that it may be None + and callers should remove the reference to the frame as soon as possible + to avoid disturbing user code. + ''' + # sys._current_frames(): dictionary with thread id -> topmost frame + current_frames = _current_frames() + topmost_frame = current_frames.get(thread.ident) + if topmost_frame is None: + # Note: this is expected for dummy threads (so, getting the topmost frame should be + # treated as optional). + pydev_log.info( + 'Unable to get topmost frame for thread: %s, thread.ident: %s, id(thread): %s\nCurrent frames: %s.\n' + 'GEVENT_SUPPORT: %s', + thread, + thread.ident, + id(thread), + current_frames, + SUPPORT_GEVENT, + ) + + return topmost_frame + + def __str__(self): + return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + + +_set_additional_thread_info_lock = ForkSafeLock() + + +def set_additional_thread_info(thread): + try: + additional_info = thread.additional_info + if additional_info is None: + raise AttributeError() + except: + with _set_additional_thread_info_lock: + # If it's not there, set it within a lock to avoid any racing + # conditions. 
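+            # (Double-checked locking: the attribute is read again while the lock
+            # is held, so if another thread created the info object between the
+            # failed lookup above and acquiring the lock, that instance is reused
+            # rather than overwritten.)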
+ additional_info = getattr(thread, 'additional_info', None) + if additional_info is None: + additional_info = PyDBAdditionalThreadInfo() + thread.additional_info = additional_info + + return additional_info +import linecache +import os.path +import re + +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_dont_trace +from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, + EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED, PYDEVD_IPYTHON_CONTEXT) +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace +from _pydevd_bundle.pydevd_utils import get_clsname_for_code +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK +try: + from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset +except ImportError: + + def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): + return None + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +# ELSE +# # Note: those are now inlined on cython. +# 107 = 107 +# 144 = 144 +# 109 = 109 +# 160 = 160 +# 108 = 108 +# 159 = 159 +# 137 = 137 +# 111 = 111 +# 128 = 128 +# 206 = 206 +# 1 = 1 +# 2 = 2 +# ENDIF + +basename = os.path.basename + +IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') +DEBUG_START = ('pydevd.py', 'run') +DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') +TRACE_PROPERTY = 'pydevd_traceproperty.py' + +import dis + +try: + StopAsyncIteration +except NameError: + StopAsyncIteration = StopIteration + + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef is_unhandled_exception(container_obj, py_db, frame, int last_raise_line, set raise_lines): +# ELSE +# def is_unhandled_exception(container_obj, py_db, frame, last_raise_line, raise_lines): +# ENDIF + if frame.f_lineno in raise_lines: + return True + + else: + try_except_infos = container_obj.try_except_infos + if try_except_infos is None: + container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + + if not try_except_infos: + # Consider the last exception as unhandled because there's no try..except in it. + return True + else: + # Now, consider only the try..except for the raise + valid_try_except_infos = [] + for try_except_info in try_except_infos: + if try_except_info.is_line_in_try_block(last_raise_line): + valid_try_except_infos.append(try_except_info) + + if not valid_try_except_infos: + return True + + else: + # Note: check all, not only the "valid" ones to cover the case + # in "tests_python.test_tracing_on_top_level.raise_unhandled10" + # where one try..except is inside the other with only a raise + # and it's gotten in the except line. + for try_except_info in try_except_infos: + if try_except_info.is_line_in_except_block(frame.f_lineno): + if ( + frame.f_lineno == try_except_info.except_line or + frame.f_lineno in try_except_info.raise_lines_in_except + ): + # In a raise inside a try..except block or some except which doesn't + # match the raised exception. 
+ return True + return False + + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class _TryExceptContainerObj: + cdef public list try_except_infos; + def __init__(self): + self.try_except_infos = None +# ELSE +# class _TryExceptContainerObj(object): +# ''' +# A dumb container object just to containe the try..except info when needed. Meant to be +# persisent among multiple PyDBFrames to the same code object. +# ''' +# try_except_infos = None +# ENDIF + + +#======================================================================================================================= +# PyDBFrame +#======================================================================================================================= +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class PyDBFrame: +# ELSE +# class PyDBFrame: +# '''This makes the tracing for a given frame, so, the trace_dispatch +# is used initially when we enter into a new context ('call') and then +# is reused for the entire context. +# ''' +# ENDIF + + # Note: class (and not instance) attributes. + + # Same thing in the main debugger but only considering the file contents, while the one in the main debugger + # considers the user input (so, the actual result must be a join of both). + filename_to_lines_where_exceptions_are_ignored = {} + filename_to_stat_info = {} + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef tuple _args + cdef int should_skip + cdef object exc_info + def __init__(self, tuple args): + self._args = args # In the cython version we don't need to pass the frame + self.should_skip = -1 # On cythonized version, put in instance. + self.exc_info = () + # ELSE +# should_skip = -1 # Default value in class (put in instance on set). +# exc_info = () # Default value in class (put in instance on set). 
+# +# def __init__(self, args): +# # args = main_debugger, abs_path_canonical_path_and_base, base, info, t, frame +# # yeap, much faster than putting in self and then getting it from self later on +# self._args = args + # ENDIF + + def set_suspend(self, *args, **kwargs): + self._args[0].set_suspend(*args, **kwargs) + + def do_wait_suspend(self, *args, **kwargs): + self._args[0].do_wait_suspend(*args, **kwargs) + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + def trace_exception(self, frame, str event, arg): + cdef bint should_stop; + cdef tuple exc_info; + # ELSE +# def trace_exception(self, frame, event, arg): + # ENDIF + if event == 'exception': + should_stop, frame = self._should_stop_on_exception(frame, event, arg) + + if should_stop: + if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + return self.trace_dispatch + + elif event == 'return': + exc_info = self.exc_info + if exc_info and arg is None: + frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + custom_key = (frame_cache_key, 'try_exc_info') + container_obj = frame_skips_cache.get(custom_key) + if container_obj is None: + container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + self.handle_user_exception(frame): + return self.trace_dispatch + + return self.trace_exception + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef _should_stop_on_exception(self, frame, str event, arg): + cdef PyDBAdditionalThreadInfo info; + cdef bint should_stop; + cdef bint was_just_raised; + cdef list check_excs; + # ELSE +# def _should_stop_on_exception(self, frame, event, arg): + # ENDIF + + # main_debugger, _filename, info, _thread = self._args + main_debugger = self._args[0] + info = self._args[2] + should_stop = False + + # 2 = 2 + if info.pydev_state != 2: # and breakpoint is not None: + exception, value, trace = arg + + if trace is not None and hasattr(trace, 'tb_next'): + # on jython trace is None on the first event and it may not have a tb_next. + + should_stop = False + exception_breakpoint = None + try: + if main_debugger.plugin is not None: + result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + if result: + should_stop, frame = result + except: + pydev_log.exception() + + if not should_stop: + # Apply checks that don't need the exception breakpoint (where we shouldn't ever stop). + if exception == SystemExit and main_debugger.ignore_system_exit_code(value): + pass + + elif exception in (GeneratorExit, StopIteration, StopAsyncIteration): + # These exceptions are control-flow related (they work as a generator + # pause), so, we shouldn't stop on them. + pass + + elif ignore_exception_trace(trace): + pass + + else: + was_just_raised = trace.tb_next is None + + # It was not handled by any plugin, lets check exception breakpoints. + check_excs = [] + + # Note: check user unhandled before regular exceptions. 
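Background for the was_just_raised check above: under sys.settrace, every traced frame an exception propagates through receives its own 'exception' event, and only in the frame where the exception was actually raised is the traceback's tb_next None. A small self-contained illustration, independent of pydevd (tracer, inner and outer are made-up names):

import sys

def tracer(frame, event, arg):
    if event == 'exception':
        exc_type, exc_value, tb = arg
        # tb.tb_next is None only in the frame where the exception was raised;
        # in frames it merely propagates through, tb_next already points at
        # the more deeply nested traceback entry.
        print(frame.f_code.co_name, exc_type.__name__, 'just raised:', tb.tb_next is None)
    return tracer

def inner():
    raise ValueError('boom')

def outer():
    inner()  # the exception also propagates through this frame

sys.settrace(tracer)
try:
    outer()
except ValueError:
    pass
finally:
    sys.settrace(None)

Running this prints True for inner and False for outer, which is exactly the distinction the stepping logic needs.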
+ exc_break_user = main_debugger.get_exception_breakpoint( + exception, main_debugger.break_on_user_uncaught_exceptions) + if exc_break_user is not None: + check_excs.append((exc_break_user, True)) + + exc_break_caught = main_debugger.get_exception_breakpoint( + exception, main_debugger.break_on_caught_exceptions) + if exc_break_caught is not None: + check_excs.append((exc_break_caught, False)) + + for exc_break, is_user_uncaught in check_excs: + # Initially mark that it should stop and then go into exclusions. + should_stop = True + + if main_debugger.exclude_exception_by_filter(exc_break, trace): + pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) + should_stop = False + + elif exc_break.condition is not None and \ + not main_debugger.handle_breakpoint_condition(info, exc_break, frame): + should_stop = False + + elif is_user_uncaught: + # Note: we don't stop here, we just collect the exc_info to use later on... + should_stop = False + if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ + and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): + # User uncaught means that we're currently in user code but the code + # up the stack is library code. + exc_info = self.exc_info + if not exc_info: + exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) + else: + lines = exc_info[2] + lines.add(frame.f_lineno) + exc_info = (arg, frame.f_lineno, lines) + self.exc_info = exc_info + else: + # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ + and not was_just_raised and not just_raised(trace.tb_next): + # In this case we never stop if it was just raised, so, to know if it was the first we + # need to check if we're in the 2nd method. + should_stop = False # I.e.: we stop only when we're at the caller of a method that throws an exception + + elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ + and not was_just_raised: + should_stop = False # I.e.: we stop only when it was just raised + + elif was_just_raised and main_debugger.skip_on_exceptions_thrown_in_same_context: + # Option: Don't break if an exception is caught in the same function from which it is thrown + should_stop = False + + if should_stop: + exception_breakpoint = exc_break + try: + info.pydev_message = exc_break.qname + except: + info.pydev_message = exc_break.qname.encode('utf-8') + break + + if should_stop: + # Always add exception to frame (must remove later after we proceed). 
+ add_exception_to_frame(frame, (exception, value, trace)) + + if exception_breakpoint is not None and exception_breakpoint.expression is not None: + main_debugger.handle_breakpoint_expression(exception_breakpoint, info, frame) + + return should_stop, frame + + def handle_user_exception(self, frame): + exc_info = self.exc_info + if exc_info: + return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) + return False + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef _handle_exception(self, frame, str event, arg, str exception_type): + cdef bint stopped; + cdef tuple abs_real_path_and_base; + cdef str absolute_filename; + cdef str canonical_normalized_filename; + cdef dict filename_to_lines_where_exceptions_are_ignored; + cdef dict lines_ignored; + cdef dict frame_id_to_frame; + cdef dict merged; + cdef object trace_obj; + cdef object main_debugger; + # ELSE +# def _handle_exception(self, frame, event, arg, exception_type): + # ENDIF + stopped = False + try: + # print('_handle_exception', frame.f_lineno, frame.f_code.co_name) + + # We have 3 things in arg: exception type, description, traceback object + trace_obj = arg[2] + main_debugger = self._args[0] + + initial_trace_obj = trace_obj + if trace_obj.tb_next is None and trace_obj.tb_frame is frame: + # I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + pass + else: + # Get the trace_obj from where the exception was raised... + while trace_obj.tb_next is not None: + trace_obj = trace_obj.tb_next + + if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + for check_trace_obj in (initial_trace_obj, trace_obj): + abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + absolute_filename = abs_real_path_and_base[0] + canonical_normalized_filename = abs_real_path_and_base[1] + + filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored + + lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + if lines_ignored is None: + lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + + try: + curr_stat = os.stat(absolute_filename) + curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + except: + curr_stat = None + + last_stat = self.filename_to_stat_info.get(absolute_filename) + if last_stat != curr_stat: + self.filename_to_stat_info[absolute_filename] = curr_stat + lines_ignored.clear() + try: + linecache.checkcache(absolute_filename) + except: + pydev_log.exception('Error in linecache.checkcache(%r)', absolute_filename) + + from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + if from_user_input: + merged = {} + merged.update(lines_ignored) + # Override what we have with the related entries that the user entered + merged.update(from_user_input) + else: + merged = lines_ignored + + exc_lineno = check_trace_obj.tb_lineno + + # print ('lines ignored', lines_ignored) + # print ('user input', from_user_input) + # print ('merged', merged, 'curr', exc_lineno) + + if exc_lineno not in merged: # Note: check on merged but update lines_ignored. 
+ try: + line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + except: + pydev_log.exception('Error in linecache.getline(%r, %s, f_globals)', absolute_filename, exc_lineno) + line = '' + + if IGNORE_EXCEPTION_TAG.match(line) is not None: + lines_ignored[exc_lineno] = 1 + return False + else: + # Put in the cache saying not to ignore + lines_ignored[exc_lineno] = 0 + else: + # Ok, dict has it already cached, so, let's check it... + if merged.get(exc_lineno, 0): + return False + + thread = self._args[3] + + try: + frame_id_to_frame = {} + frame_id_to_frame[id(frame)] = frame + f = trace_obj.tb_frame + while f is not None: + frame_id_to_frame[id(f)] = f + f = f.f_back + f = None + + stopped = True + main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + try: + self.set_suspend(thread, 137) + self.do_wait_suspend(thread, frame, event, arg, exception_type=exception_type) + finally: + main_debugger.send_caught_exception_stack_proceeded(thread) + except: + pydev_log.exception() + + main_debugger.set_trace_for_frame_and_parents(frame) + finally: + # Make sure the user cannot see the '__exception__' we added after we leave the suspend state. + remove_exception_from_frame(frame) + # Clear some local variables... + frame = None + trace_obj = None + initial_trace_obj = None + check_trace_obj = None + f = None + frame_id_to_frame = None + main_debugger = None + thread = None + + return stopped + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef get_func_name(self, frame): + cdef str func_name + # ELSE +# def get_func_name(self, frame): + # ENDIF + code_obj = frame.f_code + func_name = code_obj.co_name + try: + cls_name = get_clsname_for_code(code_obj, frame) + if cls_name is not None: + return "%s.%s" % (cls_name, func_name) + else: + return func_name + except: + pydev_log.exception() + return func_name + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef _show_return_values(self, frame, arg): + # ELSE +# def _show_return_values(self, frame, arg): + # ENDIF + try: + try: + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + if return_values_dict is None: + return_values_dict = {} + f_locals_back[RETURN_VALUES_DICT] = return_values_dict + name = self.get_func_name(frame) + return_values_dict[name] = arg + except: + pydev_log.exception() + finally: + f_locals_back = None + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef _remove_return_values(self, main_debugger, frame): + # ELSE +# def _remove_return_values(self, main_debugger, frame): + # ENDIF + try: + try: + # Showing return values was turned off, we should remove them from locals dict. 
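The lines-to-ignore handling above relies on linecache plus a simple regex to honor "# @IgnoreException" markers in source files. A standalone sketch of that lookup, under the assumption that a plain file path and line number are available (line_is_marked is a made-up helper; the regex is the one defined at the top of this module):

import linecache
import re

IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException')

def line_is_marked(filename, lineno):
    # linecache keeps file contents in memory; checkcache() drops stale
    # entries when the file changed on disk (pydevd first compares the
    # file's size/mtime so it only pays this cost when needed).
    linecache.checkcache(filename)
    line = linecache.getline(filename, lineno)
    return IGNORE_EXCEPTION_TAG.match(line) is not None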
+ # The values can be in the current frame or in the back one + frame.f_locals.pop(RETURN_VALUES_DICT, None) + + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + f_locals_back.pop(RETURN_VALUES_DICT, None) + except: + pydev_log.exception() + finally: + f_locals_back = None + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef _get_unfiltered_back_frame(self, main_debugger, frame): + # ELSE +# def _get_unfiltered_back_frame(self, main_debugger, frame): + # ENDIF + f = frame.f_back + while f is not None: + if not main_debugger.is_files_filter_enabled: + return f + + else: + if main_debugger.apply_files_filter(f, f.f_code.co_filename, False): + f = f.f_back + + else: + return f + + return f + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef _is_same_frame(self, target_frame, current_frame): + cdef PyDBAdditionalThreadInfo info; + # ELSE +# def _is_same_frame(self, target_frame, current_frame): + # ENDIF + if target_frame is current_frame: + return True + + info = self._args[2] + if info.pydev_use_scoped_step_frame: + # If using scoped step we don't check the target, we just need to check + # if the current matches the same heuristic where the target was defined. + if target_frame is not None and current_frame is not None: + if target_frame.f_code.co_filename == current_frame.f_code.co_filename: + # The co_name may be different (it may include the line number), but + # the filename must still be the same. + f = current_frame.f_back + if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + f = f.f_back + if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + return True + + return False + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cpdef trace_dispatch(self, frame, str event, arg): + cdef tuple abs_path_canonical_path_and_base; + cdef bint is_exception_event; + cdef bint has_exception_breakpoints; + cdef bint can_skip; + cdef bint stop; + cdef PyDBAdditionalThreadInfo info; + cdef int step_cmd; + cdef int line; + cdef bint is_line; + cdef bint is_call; + cdef bint is_return; + cdef bint should_stop; + cdef dict breakpoints_for_file; + cdef dict stop_info; + cdef str curr_func_name; + cdef bint exist_result; + cdef dict frame_skips_cache; + cdef object frame_cache_key; + cdef tuple line_cache_key; + cdef int breakpoints_in_line_cache; + cdef int breakpoints_in_frame_cache; + cdef bint has_breakpoint_in_frame; + cdef bint is_coroutine_or_generator; + cdef int bp_line; + cdef object bp; + cdef int pydev_smart_parent_offset + cdef int pydev_smart_child_offset + cdef tuple pydev_smart_step_into_variants + # ELSE +# def trace_dispatch(self, frame, event, arg): + # ENDIF + # Note: this is a big function because most of the logic related to hitting a breakpoint and + # stepping is contained in it. Ideally this could be split among multiple functions, but the + # problem in this case is that in pure-python function calls are expensive and even more so + # when tracing is on (because each function call will get an additional tracing call). We + # try to address this by using the info.is_tracing for the fastest possible return, but the + # cost is still high (maybe we could use code-generation in the future and make the code + # generation be better split among what each part does). 
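One piece of context for the dispatch logic that follows: under sys.settrace, whatever a trace callback returns becomes the frame's local trace function. Returning None from the 'call' event opts the frame out entirely (for the other events this module returns the no-op NO_FTRACE function to the same effect), while returning the callback itself keeps tracing active. A tiny self-contained example of that protocol (count_lines and traced are made-up names):

import sys

def count_lines(frame, event, arg):
    # Called for the 'call' event of every new frame. The return value is
    # installed as frame.f_trace and then receives the frame's 'line',
    # 'return' and 'exception' events; returning None opts the frame out.
    if frame.f_code.co_name != 'traced':
        return None
    def local(frame, event, arg):
        if event == 'line':
            print('line', frame.f_lineno)
        return local  # keep tracing this frame
    return local

def traced():
    x = 1
    y = x + 1
    return y

sys.settrace(count_lines)
traced()
sys.settrace(None)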
+ + # DEBUG = '_debugger_case_generator.py' in frame.f_code.co_filename + main_debugger, abs_path_canonical_path_and_base, info, thread, frame_skips_cache, frame_cache_key = self._args + # if DEBUG: print('frame trace_dispatch %s %s %s %s %s %s, stop: %s' % (frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename, event, constant_to_str(info.pydev_step_cmd), arg, info.pydev_step_stop)) + try: + info.is_tracing += 1 + + # TODO: This shouldn't be needed. The fact that frame.f_lineno + # is None seems like a bug in Python 3.11. + # Reported in: https://github.com/python/cpython/issues/94485 + line = frame.f_lineno or 0 # Workaround or case where frame.f_lineno is None + line_cache_key = (frame_cache_key, line) + + if main_debugger.pydb_disposed: + return None if event == 'call' else NO_FTRACE + + plugin_manager = main_debugger.plugin + has_exception_breakpoints = ( + main_debugger.break_on_caught_exceptions + or main_debugger.break_on_user_uncaught_exceptions + or main_debugger.has_plugin_exception_breaks) + + stop_frame = info.pydev_step_stop + step_cmd = info.pydev_step_cmd + function_breakpoint_on_call_event = None + + if frame.f_code.co_flags & 0xa0: # 0xa0 == CO_GENERATOR = 0x20 | CO_COROUTINE = 0x80 + # Dealing with coroutines and generators: + # When in a coroutine we change the perceived event to the debugger because + # a call, StopIteration exception and return are usually just pausing/unpausing it. + if event == 'line': + is_line = True + is_call = False + is_return = False + is_exception_event = False + + elif event == 'return': + is_line = False + is_call = False + is_return = True + is_exception_event = False + + returns_cache_key = (frame_cache_key, 'returns') + return_lines = frame_skips_cache.get(returns_cache_key) + if return_lines is None: + # Note: we're collecting the return lines by inspecting the bytecode as + # there are multiple returns and multiple stop iterations when awaiting and + # it doesn't give any clear indication when a coroutine or generator is + # finishing or just pausing. + return_lines = set() + for x in main_debugger.collect_return_info(frame.f_code): + # Note: cython does not support closures in cpdefs (so we can't use + # a list comprehension). + return_lines.add(x.return_line) + + frame_skips_cache[returns_cache_key] = return_lines + + if line not in return_lines: + # Not really a return (coroutine/generator paused). + return self.trace_dispatch + else: + if self.exc_info: + self.handle_user_exception(frame) + return self.trace_dispatch + + # Tricky handling: usually when we're on a frame which is about to exit + # we set the step mode to step into, but in this case we'd end up in the + # asyncio internal machinery, which is not what we want, so, we just + # ask the stop frame to be a level up. + # + # Note that there's an issue here which we may want to fix in the future: if + # the back frame is a frame which is filtered, we won't stop properly. + # Solving this may not be trivial as we'd need to put a scope in the step + # in, but we may have to do it anyways to have a step in which doesn't end + # up in asyncio). + # + # Note2: we don't revert to a step in if we're doing scoped stepping + # (because on scoped stepping we're always receiving a call/line/return + # event for each line in ipython, so, we can't revert to step in on return + # as the return shouldn't mean that we've actually completed executing a + # frame in this case). 
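The co_flags & 0xa0 test above is simply CO_GENERATOR (0x20) combined with CO_COROUTINE (0x80). A standalone check using the constants the inspect module exposes (is_gen_or_coro is a made-up helper; note this mask does not cover async generators):

import inspect

def is_gen_or_coro(code):
    # 0xa0 == inspect.CO_GENERATOR (0x20) | inspect.CO_COROUTINE (0x80)
    return bool(code.co_flags & (inspect.CO_GENERATOR | inspect.CO_COROUTINE))

def plain():
    pass

def gen():
    yield 1

async def coro():
    pass

print(is_gen_or_coro(plain.__code__))  # False
print(is_gen_or_coro(gen.__code__))    # True
print(is_gen_or_coro(coro.__code__))   # True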
+ if stop_frame is frame and not info.pydev_use_scoped_step_frame: + if step_cmd in (108, 159, 107, 144): + f = self._get_unfiltered_back_frame(main_debugger, frame) + if f is not None: + info.pydev_step_cmd = 206 + info.pydev_step_stop = f + else: + if step_cmd == 108: + info.pydev_step_cmd = 107 + info.pydev_step_stop = None + + elif step_cmd == 159: + info.pydev_step_cmd = 144 + info.pydev_step_stop = None + + elif step_cmd == 206: + # We're exiting this one, so, mark the new coroutine context. + f = self._get_unfiltered_back_frame(main_debugger, frame) + if f is not None: + info.pydev_step_stop = f + else: + info.pydev_step_cmd = 107 + info.pydev_step_stop = None + + elif event == 'exception': + breakpoints_for_file = None + if has_exception_breakpoints: + should_stop, frame = self._should_stop_on_exception(frame, event, arg) + if should_stop: + if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + return self.trace_dispatch + + return self.trace_dispatch + else: + # event == 'call' or event == 'c_XXX' + return self.trace_dispatch + + else: # Not coroutine nor generator + if event == 'line': + is_line = True + is_call = False + is_return = False + is_exception_event = False + + elif event == 'return': + is_line = False + is_return = True + is_call = False + is_exception_event = False + + # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break + # eventually. Force the step mode to step into and the step stop frame to None. + # I.e.: F6 in the end of a function should stop in the next possible position (instead of forcing the user + # to make a step in or step over at that location). + # Note: this is especially troublesome when we're skipping code with the + # @DontTrace comment. + if ( + stop_frame is frame and + not info.pydev_use_scoped_step_frame and is_return and + step_cmd in (108, 109, 159, 160, 128) + ): + + if step_cmd in (108, 109, 128): + info.pydev_step_cmd = 107 + else: + info.pydev_step_cmd = 144 + info.pydev_step_stop = None + + if self.exc_info: + if self.handle_user_exception(frame): + return self.trace_dispatch + + elif event == 'call': + is_line = False + is_call = True + is_return = False + is_exception_event = False + if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. + function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + + elif event == 'exception': + is_exception_event = True + breakpoints_for_file = None + if has_exception_breakpoints: + should_stop, frame = self._should_stop_on_exception(frame, event, arg) + if should_stop: + if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + return self.trace_dispatch + is_line = False + is_return = False + is_call = False + + else: + # Unexpected: just keep the same trace func (i.e.: event == 'c_XXX'). 
+ return self.trace_dispatch + + if not is_exception_event: + breakpoints_for_file = main_debugger.breakpoints.get(abs_path_canonical_path_and_base[1]) + + can_skip = False + + if info.pydev_state == 1: # 1 = 1 + # we can skip if: + # - we have no stop marked + # - we should make a step return/step over and we're not in the current frame + # - we're stepping into a coroutine context and we're not in that context + if step_cmd == -1: + can_skip = True + + elif step_cmd in (108, 109, 159, 160) and not self._is_same_frame(stop_frame, frame): + can_skip = True + + elif step_cmd == 128 and ( + stop_frame is not None and + stop_frame is not frame and + stop_frame is not frame.f_back and + (frame.f_back is None or stop_frame is not frame.f_back.f_back)): + can_skip = True + + elif step_cmd == 144: + if ( + main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) + ): + can_skip = True + + elif step_cmd == 206: + f = frame + while f is not None: + if self._is_same_frame(stop_frame, f): + break + f = f.f_back + else: + can_skip = True + + if can_skip: + if plugin_manager is not None and ( + main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + can_skip = plugin_manager.can_skip(main_debugger, frame) + + if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and self._is_same_frame(stop_frame, frame.f_back): + # trace function for showing return values after step over + can_skip = False + + # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint, + # we will return nothing for the next trace + # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, + # so, that's why the additional checks are there. + + if function_breakpoint_on_call_event: + pass # Do nothing here (just keep on going as we can't skip it). + + elif not breakpoints_for_file: + if can_skip: + if has_exception_breakpoints: + return self.trace_exception + else: + return None if is_call else NO_FTRACE + + else: + # When cached, 0 means we don't have a breakpoint and 1 means we have. + if can_skip: + breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + if breakpoints_in_line_cache == 0: + return self.trace_dispatch + + breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + if breakpoints_in_frame_cache != -1: + # Gotten from cache. + has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + + else: + has_breakpoint_in_frame = False + + try: + func_lines = set() + for offset_and_lineno in dis.findlinestarts(frame.f_code): + func_lines.add(offset_and_lineno[1]) + except: + # This is a fallback for implementations where we can't get the function + # lines -- i.e.: jython (in this case clients need to provide the function + # name to decide on the skip or we won't be able to skip the function + # completely). + + # Checks the breakpoint to see if there is a context match in some function. 
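The func_lines computation above uses dis.findlinestarts to learn which source lines belong to a code object, which is what lets a whole function be skipped when none of its lines carry a breakpoint. A standalone sketch under the same idea (has_breakpoint_in is a made-up name):

import dis

def has_breakpoint_in(code, breakpoint_lines):
    # dis.findlinestarts yields (bytecode offset, line number) pairs, one for
    # each source line that starts instructions in this code object.
    func_lines = {lineno for _, lineno in dis.findlinestarts(code)}
    return bool(func_lines & set(breakpoint_lines))

def example():
    a = 1
    b = a + 1
    return b

first_line = example.__code__.co_firstlineno
print(has_breakpoint_in(example.__code__, {first_line + 1}))  # True: the 'a = 1' line
print(has_breakpoint_in(example.__code__, {10 ** 6}))         # False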
+ curr_func_name = frame.f_code.co_name + + # global context is set with an empty name + if curr_func_name in ('?', '', ''): + curr_func_name = '' + + for bp in breakpoints_for_file.values(): + # will match either global or some function + if bp.func_name in ('None', curr_func_name): + has_breakpoint_in_frame = True + break + else: + for bp_line in breakpoints_for_file: # iterate on keys + if bp_line in func_lines: + has_breakpoint_in_frame = True + break + + # Cache the value (1 or 0 or -1 for default because of cython). + if has_breakpoint_in_frame: + frame_skips_cache[frame_cache_key] = 1 + else: + frame_skips_cache[frame_cache_key] = 0 + + if can_skip and not has_breakpoint_in_frame: + if has_exception_breakpoints: + return self.trace_exception + else: + return None if is_call else NO_FTRACE + + # We may have hit a breakpoint or we are already in step mode. Either way, let's check what we should do in this frame + # if DEBUG: print('NOT skipped: %s %s %s %s' % (frame.f_lineno, frame.f_code.co_name, event, frame.__class__.__name__)) + + try: + flag = False + # return is not taken into account for breakpoint hit because we'd have a double-hit in this case + # (one for the line and the other for the return). + + stop_info = {} + breakpoint = None + exist_result = False + stop = False + stop_reason = 111 + bp_type = None + + if function_breakpoint_on_call_event: + breakpoint = function_breakpoint_on_call_event + stop = True + new_frame = frame + stop_reason = CMD_SET_FUNCTION_BREAK + + elif is_line and info.pydev_state != 2 and breakpoints_for_file is not None and line in breakpoints_for_file: + breakpoint = breakpoints_for_file[line] + new_frame = frame + stop = True + if step_cmd in (108, 159) and (self._is_same_frame(stop_frame, frame) and is_line): + stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + if result: + exist_result = True + flag, breakpoint, new_frame, bp_type = result + + if breakpoint: + # ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + # lets do the conditional stuff here + if breakpoint.expression is not None: + main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') + main_debugger.writer.add_command(cmd) + + if stop or exist_result: + eval_result = False + if breakpoint.has_condition: + eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + + if breakpoint.has_condition: + if not eval_result: + stop = False + elif breakpoint.is_logpoint: + stop = False + + if is_call and (frame.f_code.co_name in ('', '') or (line == 1 and frame.f_code.co_name.startswith(' may be executed having each line compiled as a new + # module, so it's the same case as . 
+ + return self.trace_dispatch + + if main_debugger.show_return_values: + if is_return and ( + (info.pydev_step_cmd in (108, 159, 128) and (self._is_same_frame(stop_frame, frame.f_back))) or + (info.pydev_step_cmd in (109, 160) and (self._is_same_frame(stop_frame, frame))) or + (info.pydev_step_cmd in (107, 206)) or + ( + info.pydev_step_cmd == 144 + and frame.f_back is not None + and not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True) + ) + ): + self._show_return_values(frame, arg) + + elif main_debugger.remove_return_values_flag: + try: + self._remove_return_values(main_debugger, frame) + finally: + main_debugger.remove_return_values_flag = False + + if stop: + self.set_suspend( + thread, + stop_reason, + suspend_other_threads=breakpoint and breakpoint.suspend_policy == "ALL", + ) + + elif flag and plugin_manager is not None: + result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + if result: + frame = result + + # if thread has a suspend flag, we suspend with a busy wait + if info.pydev_state == 2: + self.do_wait_suspend(thread, frame, event, arg) + return self.trace_dispatch + else: + if not breakpoint and is_line: + # No stop from anyone and no breakpoint found in line (cache that). + frame_skips_cache[line_cache_key] = 0 + + except: + pydev_log.exception() + raise + + # step handling. We stop when we hit the right frame + try: + should_skip = 0 + if pydevd_dont_trace.should_trace_hook is not None: + if self.should_skip == -1: + # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + # Which will be handled by this frame is read-only, so, we can cache it safely. + if not pydevd_dont_trace.should_trace_hook(frame, abs_path_canonical_path_and_base[0]): + # -1, 0, 1 to be Cython-friendly + should_skip = self.should_skip = 1 + else: + should_skip = self.should_skip = 0 + else: + should_skip = self.should_skip + + plugin_stop = False + if should_skip: + stop = False + + elif step_cmd in (107, 144, 206): + force_check_project_scope = step_cmd == 144 + if is_line: + if not info.pydev_use_scoped_step_frame: + if force_check_project_scope or main_debugger.is_files_filter_enabled: + stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + else: + stop = True + else: + # We can only stop inside the ipython call. + filename = frame.f_code.co_filename + if filename.endswith('.pyc'): + filename = filename[:-1] + + if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): + f = frame.f_back + while f is not None: + if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + f2 = f.f_back + if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + pydev_log.debug('Stop inside ipython call') + stop = True + break + f = f.f_back + + del f + + if not stop: + # In scoped mode if step in didn't work in this context it won't work + # afterwards anyways. 
+ return None if is_call else NO_FTRACE + + elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: + if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + stop = False + else: + if force_check_project_scope or main_debugger.is_files_filter_enabled: + stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) + if stop: + # Prevent stopping in a return to the same location we were initially + # (i.e.: double-stop at the same place due to some filtering). + if info.step_in_initial_location == (frame.f_back, frame.f_back.f_lineno): + stop = False + else: + stop = True + else: + stop = False + + if stop: + if step_cmd == 206: + # i.e.: Check if we're stepping into the proper context. + f = frame + while f is not None: + if self._is_same_frame(stop_frame, f): + break + f = f.f_back + else: + stop = False + + if plugin_manager is not None: + result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd in (108, 159): + # Note: when dealing with a step over my code it's the same as a step over (the + # difference is that when we return from a frame in one we go to regular step + # into and in the other we go to a step into my code). + stop = self._is_same_frame(stop_frame, frame) and is_line + # Note: don't stop on a return for step over, only for line events + # i.e.: don't stop in: (stop_frame is frame.f_back and is_return) as we'd stop twice in that line. + + if plugin_manager is not None: + result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd == 128: + stop = False + back = frame.f_back + if self._is_same_frame(stop_frame, frame) and is_return: + # We're exiting the smart step into initial frame (so, we probably didn't find our target). + stop = True + + elif self._is_same_frame(stop_frame, back) and is_line: + if info.pydev_smart_child_offset != -1: + # i.e.: in this case, we're not interested in the pause in the parent, rather + # we're interested in the pause in the child (when the parent is at the proper place). + stop = False + + else: + pydev_smart_parent_offset = info.pydev_smart_parent_offset + + pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: + # Preferred mode (when the smart step into variants are available + # and the offset is set). + stop = get_smart_step_into_variant_from_frame_offset(back.f_lasti, pydev_smart_step_into_variants) is \ + get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) + + else: + # Only the name/line is available, so, check that. + curr_func_name = frame.f_code.co_name + + # global context is set with an empty name + if curr_func_name in ('?', '') or curr_func_name is None: + curr_func_name = '' + if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: + stop = True + + if not stop: + # In smart step into, if we didn't hit it in this frame once, that'll + # not be the case next time either, so, disable tracing for this frame. + return None if is_call else NO_FTRACE + + elif back is not None and self._is_same_frame(stop_frame, back.f_back) and is_line: + # Ok, we have to track 2 stops at this point, the parent and the child offset. 
+ # This happens when handling a step into which targets a function inside a list comprehension + # or generator (in which case an intermediary frame is created due to an internal function call). + pydev_smart_parent_offset = info.pydev_smart_parent_offset + pydev_smart_child_offset = info.pydev_smart_child_offset + # print('matched back frame', pydev_smart_parent_offset, pydev_smart_child_offset) + # print('parent f_lasti', back.f_back.f_lasti) + # print('child f_lasti', back.f_lasti) + stop = False + if pydev_smart_child_offset >= 0 and pydev_smart_child_offset >= 0: + pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + + if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: + # Note that we don't really check the parent offset, only the offset of + # the child (because this is a generator, the parent may have moved forward + # already -- and that's ok, so, we just check that the parent frame + # matches in this case). + smart_step_into_variant = get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) + # print('matched parent offset', pydev_smart_parent_offset) + # Ok, now, check the child variant + children_variants = smart_step_into_variant.children_variants + stop = children_variants and ( + get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ + get_smart_step_into_variant_from_frame_offset(pydev_smart_child_offset, children_variants) + ) + # print('stop at child', stop) + + if not stop: + # In smart step into, if we didn't hit it in this frame once, that'll + # not be the case next time either, so, disable tracing for this frame. + return None if is_call else NO_FTRACE + + elif step_cmd in (109, 160): + stop = is_return and self._is_same_frame(stop_frame, frame) + + else: + stop = False + + if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): + f_code = getattr(frame.f_back, 'f_code', None) + if f_code is not None: + if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + stop = False + + if plugin_stop: + stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + elif stop: + if is_line: + self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + self.do_wait_suspend(thread, frame, event, arg) + elif is_return: # return event + back = frame.f_back + if back is not None: + # When we get to the pydevd run function, the debugging has actually finished for the main thread + # (note that it can still go on for other threads, but for this one, we just make it finish) + # So, just setting it to None should be OK + back_absolute_filename, _, base = get_abs_path_real_path_and_base_from_frame(back) + if (base, back.f_code.co_name) in (DEBUG_START, DEBUG_START_PY3K): + back = None + + elif base == TRACE_PROPERTY: + # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + # if we're in a return, we want it to appear to the user in the previous frame! + return None if is_call else NO_FTRACE + + elif pydevd_dont_trace.should_trace_hook is not None: + if not pydevd_dont_trace.should_trace_hook(back, back_absolute_filename): + # In this case, we'll have to skip the previous one because it shouldn't be traced. 
+ # Also, we have to reset the tracing, because if the parent's parent (or some + # other parent) has to be traced and it's not currently, we wouldn't stop where + # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced). + # Related test: _debugger_case17a.py + main_debugger.set_trace_for_frame_and_parents(back) + return None if is_call else NO_FTRACE + + if back is not None: + # if we're in a return, we want it to appear to the user in the previous frame! + self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + self.do_wait_suspend(thread, back, event, arg) + else: + # in jython we may not have a back frame + info.pydev_step_stop = None + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_state = 1 + + except KeyboardInterrupt: + raise + except: + try: + pydev_log.exception() + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_step_stop = None + except: + return None if is_call else NO_FTRACE + + # if we are quitting, let's stop the tracing + if main_debugger.quitting: + return None if is_call else NO_FTRACE + + return self.trace_dispatch + finally: + info.is_tracing -= 1 + + # end trace_dispatch +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_bundle.pydev_log import exception as pydev_log_exception +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import (get_current_thread_id, NO_FTRACE, + USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, ForkSafeLock) +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +from cpython.object cimport PyObject +from cpython.ref cimport Py_INCREF, Py_XDECREF +# ELSE +# from _pydevd_bundle.pydevd_frame import PyDBFrame, is_unhandled_exception +# ENDIF + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef dict _global_notify_skipped_step_in +# ELSE +# # Note: those are now inlined on cython. +# 107 = 107 +# 144 = 144 +# 109 = 109 +# 160 = 160 +# ENDIF + +# Cache where we should keep that we completely skipped entering some context. +# It needs to be invalidated when: +# - Breakpoints are changed +# It can be used when running regularly (without step over/step in/step return) +global_cache_skips = {} +global_cache_frame_skips = {} + +_global_notify_skipped_step_in = False +_global_notify_skipped_step_in_lock = ForkSafeLock() + + +def notify_skipped_step_in_because_of_filters(py_db, frame): + global _global_notify_skipped_step_in + + with _global_notify_skipped_step_in_lock: + if _global_notify_skipped_step_in: + # Check with lock in place (callers should actually have checked + # before without the lock in place due to performance). 
+ return + _global_notify_skipped_step_in = True + py_db.notify_skipped_step_in_because_of_filters(frame) + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class SafeCallWrapper: + cdef method_object + def __init__(self, method_object): + self.method_object = method_object + def __call__(self, *args): + #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + #in the frame, and that reference might get destroyed by set trace on frame and parents + cdef PyObject* method_obj = self.method_object + Py_INCREF(method_obj) + ret = (method_obj)(*args) + Py_XDECREF (method_obj) + return SafeCallWrapper(ret) if ret is not None else None + def get_method_object(self): + return self.method_object +# ELSE +# ENDIF + + +def fix_top_level_trace_and_get_trace_func(py_db, frame): + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef str filename; + cdef str name; + cdef tuple args; + # ENDIF + + # Note: this is always the first entry-point in the tracing for any thread. + # After entering here we'll set a new tracing function for this thread + # where more information is cached (and will also setup the tracing for + # frames where we should deal with unhandled exceptions). + thread = None + # Cache the frame which should be traced to deal with unhandled exceptions. + # (i.e.: thread entry-points). + + f_unhandled = frame + # print('called at', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + force_only_unhandled_tracer = False + while f_unhandled is not None: + # name = splitext(basename(f_unhandled.f_code.co_filename))[0] + + name = f_unhandled.f_code.co_filename + # basename + i = name.rfind('/') + j = name.rfind('\\') + if j > i: + i = j + if i >= 0: + name = name[i + 1:] + # remove ext + i = name.rfind('.') + if i >= 0: + name = name[:i] + + if name == 'threading': + if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): + # We need __bootstrap_inner, not __bootstrap. + return None, False + + elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): + # Note: be careful not to use threading.currentThread to avoid creating a dummy thread. + t = f_unhandled.f_locals.get('self') + force_only_unhandled_tracer = True + if t is not None and isinstance(t, threading.Thread): + thread = t + break + + elif name == 'pydev_monkey': + if f_unhandled.f_code.co_name == '__call__': + force_only_unhandled_tracer = True + break + + elif name == 'pydevd': + if f_unhandled.f_code.co_name in ('run', 'main'): + # We need to get to _exec + return None, False + + if f_unhandled.f_code.co_name == '_exec': + force_only_unhandled_tracer = True + break + + elif name == 'pydevd_tracing': + return None, False + + elif f_unhandled.f_back is None: + break + + f_unhandled = f_unhandled.f_back + + if thread is None: + # Important: don't call threadingCurrentThread if we're in the threading module + # to avoid creating dummy threads. + if py_db.threading_get_ident is not None: + thread = py_db.threading_active.get(py_db.threading_get_ident()) + if thread is None: + return None, False + else: + # Jython does not have threading.get_ident(). 
+ thread = py_db.threading_current_thread() + + if getattr(thread, 'pydev_do_not_trace', None): + py_db.disable_tracing() + return None, False + + try: + additional_info = thread.additional_info + if additional_info is None: + raise AttributeError() + except: + additional_info = py_db.set_additional_thread_info(thread) + + # print('enter thread tracer', thread, get_current_thread_id(thread)) + args = (py_db, thread, additional_info, global_cache_skips, global_cache_frame_skips) + + if f_unhandled is not None: + if f_unhandled.f_back is None and not force_only_unhandled_tracer: + # Happens when we attach to a running program (cannot reuse instance because it's mutable). + top_level_thread_tracer = TopLevelThreadTracerNoBackFrame(ThreadTracer(args), args) + additional_info.top_level_thread_tracer_no_back_frames.append(top_level_thread_tracer) # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + else: + top_level_thread_tracer = additional_info.top_level_thread_tracer_unhandled + if top_level_thread_tracer is None: + # Stop in some internal place to report about unhandled exceptions + top_level_thread_tracer = TopLevelThreadTracerOnlyUnhandledExceptions(args) + additional_info.top_level_thread_tracer_unhandled = top_level_thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + + # print(' --> found to trace unhandled', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + f_trace = top_level_thread_tracer.get_trace_dispatch_func() + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + f_trace = SafeCallWrapper(f_trace) + # ENDIF + f_unhandled.f_trace = f_trace + + if frame is f_unhandled: + return f_trace, False + + thread_tracer = additional_info.thread_tracer + if thread_tracer is None or thread_tracer._args[0] is not py_db: + thread_tracer = ThreadTracer(args) + additional_info.thread_tracer = thread_tracer + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + return SafeCallWrapper(thread_tracer), True +# ELSE +# return thread_tracer, True +# ENDIF + + +def trace_dispatch(py_db, frame, event, arg): + thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + if thread_trace_func is None: + return None if event == 'call' else NO_FTRACE + if apply_to_settrace: + py_db.enable_tracing(thread_trace_func) + return thread_trace_func(frame, event, arg) + + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class TopLevelThreadTracerOnlyUnhandledExceptions: + cdef public tuple _args; + def __init__(self, tuple args): + self._args = args +# ELSE +# class TopLevelThreadTracerOnlyUnhandledExceptions(object): +# +# def __init__(self, args): +# self._args = args +# ENDIF + + def trace_unhandled_exceptions(self, frame, event, arg): + # Note that we ignore the frame as this tracing method should only be put in topmost frames already. + # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + if event == 'exception' and arg is not None: + py_db, t, additional_info = self._args[0:3] + if arg is not None: + if not additional_info.suspended_at_unhandled: + additional_info.suspended_at_unhandled = True + + py_db.stop_on_unhandled_exception(py_db, t, additional_info, arg) + + # No need to reset frame.f_trace to keep the same trace function. 
+ return self.trace_unhandled_exceptions + + def get_trace_dispatch_func(self): + return self.trace_unhandled_exceptions + + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class TopLevelThreadTracerNoBackFrame: + cdef public object _frame_trace_dispatch; + cdef public tuple _args; + cdef public object try_except_infos; + cdef public object _last_exc_arg; + cdef public set _raise_lines; + cdef public int _last_raise_line; + def __init__(self, frame_trace_dispatch, tuple args): + self._frame_trace_dispatch = frame_trace_dispatch + self._args = args + self.try_except_infos = None + self._last_exc_arg = None + self._raise_lines = set() + self._last_raise_line = -1 +# ELSE +# class TopLevelThreadTracerNoBackFrame(object): +# ''' +# This tracer is pretty special in that it's dealing with a frame without f_back (i.e.: top frame +# on remote attach or QThread). +# +# This means that we have to carefully inspect exceptions to discover whether the exception will +# be unhandled or not (if we're dealing with an unhandled exception we need to stop as unhandled, +# otherwise we need to use the regular tracer -- unfortunately the debugger has little info to +# work with in the tracing -- see: https://bugs.python.org/issue34099, so, we inspect bytecode to +# determine if some exception will be traced or not... note that if this is not available -- such +# as on Jython -- we consider any top-level exception to be unnhandled). +# ''' +# +# def __init__(self, frame_trace_dispatch, args): +# self._frame_trace_dispatch = frame_trace_dispatch +# self._args = args +# self.try_except_infos = None +# self._last_exc_arg = None +# self._raise_lines = set() +# self._last_raise_line = -1 +# ENDIF + + def trace_dispatch_and_unhandled_exceptions(self, frame, event, arg): + # DEBUG = 'code_to_debug' in frame.f_code.co_filename + # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + frame_trace_dispatch = self._frame_trace_dispatch + if frame_trace_dispatch is not None: + self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + + if event == 'exception': + self._last_exc_arg = arg + self._raise_lines.add(frame.f_lineno) + self._last_raise_line = frame.f_lineno + + elif event == 'return' and self._last_exc_arg is not None: + # For unhandled exceptions we actually track the return when at the topmost level. + try: + py_db, t, additional_info = self._args[0:3] + if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): + py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) + finally: + # Remove reference to exception after handling it. + self._last_exc_arg = None + + ret = self.trace_dispatch_and_unhandled_exceptions + + # Need to reset (the call to _frame_trace_dispatch may have changed it). 
+ # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + frame.f_trace = SafeCallWrapper(ret) + # ELSE +# frame.f_trace = ret + # ENDIF + return ret + + def get_trace_dispatch_func(self): + return self.trace_dispatch_and_unhandled_exceptions + + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class ThreadTracer: + cdef public tuple _args; + def __init__(self, tuple args): + self._args = args +# ELSE +# class ThreadTracer(object): +# +# def __init__(self, args): +# self._args = args +# ENDIF + + def __call__(self, frame, event, arg): + ''' This is the callback used when we enter some context in the debugger. + + We also decorate the thread we are in with info about the debugging. + The attributes added are: + pydev_state + pydev_step_stop + pydev_step_cmd + pydev_notify_kill + + :param PyDB py_db: + This is the global debugger (this method should actually be added as a method to it). + ''' + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef str filename; + cdef str base; + cdef int pydev_step_cmd; + cdef object frame_cache_key; + cdef dict cache_skips; + cdef bint is_stepping; + cdef tuple abs_path_canonical_path_and_base; + cdef PyDBAdditionalThreadInfo additional_info; + # ENDIF + + # DEBUG = 'code_to_debug' in frame.f_code.co_filename + # if DEBUG: print('ENTER: trace_dispatch: %s %s %s %s' % (frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)) + py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + if additional_info.is_tracing: + return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + + additional_info.is_tracing += 1 + try: + pydev_step_cmd = additional_info.pydev_step_cmd + is_stepping = pydev_step_cmd != -1 + if py_db.pydb_disposed: + return None if event == 'call' else NO_FTRACE + + # if thread is not alive, cancel trace_dispatch processing + if not is_thread_alive(t): + py_db.notify_thread_not_alive(get_current_thread_id(t)) + return None if event == 'call' else NO_FTRACE + + # Note: it's important that the context name is also given because we may hit something once + # in the global context and another in the local context. + frame_cache_key = frame.f_code + if frame_cache_key in cache_skips: + if not is_stepping: + # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + else: + # When stepping we can't take into account caching based on the breakpoints (only global filtering). + if cache_skips.get(frame_cache_key) == 1: + + if additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + notify_skipped_step_in_because_of_filters(py_db, frame) + + back_frame = frame.f_back + if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + back_frame_cache_key = back_frame.f_code + if cache_skips.get(back_frame_cache_key) == 1: + # if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + else: + # if DEBUG: print('skipped: trace_dispatch (cache hit: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + + try: + # Make fast path faster! 
+ abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_canonical_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + + file_type = py_db.get_file_type(frame, abs_path_canonical_path_and_base) # we don't want to debug threading or anything related to pydevd + + if file_type is not None: + if file_type == 1: # inlining LIB_FILE = 1 + if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[frame_cache_key] = 1 + return None if event == 'call' else NO_FTRACE + else: + # if DEBUG: print('skipped: trace_dispatch', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[frame_cache_key] = 1 + return None if event == 'call' else NO_FTRACE + + if py_db.is_files_filter_enabled: + if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): + cache_skips[frame_cache_key] = 1 + + if is_stepping and additional_info.pydev_original_step_cmd in (107, 144) and not _global_notify_skipped_step_in: + notify_skipped_step_in_because_of_filters(py_db, frame) + + # A little gotcha, sometimes when we're stepping in we have to stop in a + # return event showing the back frame as the current frame, so, we need + # to check not only the current frame but the back frame too. + back_frame = frame.f_back + if back_frame is not None and pydev_step_cmd in (107, 144, 109, 160): + if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + back_frame_cache_key = back_frame.f_code + cache_skips[back_frame_cache_key] = 1 + # if DEBUG: print('skipped: trace_dispatch (filtered out: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + else: + # if DEBUG: print('skipped: trace_dispatch (filtered out: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + + # if DEBUG: print('trace_dispatch', filename, frame.f_lineno, event, frame.f_code.co_name, file_type) + + # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + # reference to the frame). + ret = PyDBFrame( + ( + py_db, abs_path_canonical_path_and_base, additional_info, t, frame_skips_cache, frame_cache_key, + ) + ).trace_dispatch(frame, event, arg) + if ret is None: + # 1 means skipped because of filters. + # 2 means skipped because no breakpoints were hit. + cache_skips[frame_cache_key] = 2 + return None if event == 'call' else NO_FTRACE + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + frame.f_trace = SafeCallWrapper(ret) # Make sure we keep the returned tracer. + # ELSE +# frame.f_trace = ret # Make sure we keep the returned tracer. + # ENDIF + return ret + + except SystemExit: + return None if event == 'call' else NO_FTRACE + + except Exception: + if py_db.pydb_disposed: + return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. + # Log it + try: + if pydev_log_exception is not None: + # This can actually happen during the interpreter shutdown in Python 2.7 + pydev_log_exception() + except: + # Error logging? We're really in the interpreter shutdown... 
+ # (https://github.com/fabioz/PyDev.Debugger/issues/8) + pass + return None if event == 'call' else NO_FTRACE + finally: + additional_info.is_tracing -= 1 + + +if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: + # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + # may live longer... as IronPython is garbage-collected, things should live longer anyways, so, it + # shouldn't be an issue as big as it's in CPython -- it may still be annoying, but this should + # be a reasonable workaround until IronPython itself is able to provide that functionality). + # + # See: https://github.com/IronLanguages/main/issues/1630 + from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame + + _original_call = ThreadTracer.__call__ + + def __call__(self, frame, event, arg): + constructed_tid_to_last_frame[self._args[1].ident] = frame + return _original_call(self, frame, event, arg) + + ThreadTracer.__call__ = __call__ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py new file mode 120000 index 0000000..f5967c2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/85/57/f3/86032e9c17ae5641a5f6a56713ab8c145497e03761c4f9742b6f9371de \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_daemon_thread.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_daemon_thread.py new file mode 100644 index 0000000..8729597 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_daemon_thread.py @@ -0,0 +1,193 @@ +from _pydev_bundle._pydev_saved_modules import threading +from _pydev_bundle import _pydev_saved_modules +from _pydevd_bundle.pydevd_utils import notify_about_gevent_if_needed +import weakref +from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_IRONPYTHON, \ + PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS +from _pydev_bundle.pydev_log import exception as pydev_log_exception +import sys +from _pydev_bundle import pydev_log +import pydevd_tracing +from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions + +if IS_JYTHON: + import org.python.core as JyCore # @UnresolvedImport + + +class PyDBDaemonThread(threading.Thread): + + def __init__(self, py_db, target_and_args=None): + ''' + :param target_and_args: + tuple(func, args, kwargs) if this should be a function and args to run. + -- Note: use through run_as_pydevd_daemon_thread(). 
+ ''' + threading.Thread.__init__(self) + notify_about_gevent_if_needed() + self._py_db = weakref.ref(py_db) + self._kill_received = False + mark_as_pydevd_daemon_thread(self) + self._target_and_args = target_and_args + + @property + def py_db(self): + return self._py_db() + + def run(self): + created_pydb_daemon = self.py_db.created_pydb_daemon_threads + created_pydb_daemon[self] = 1 + try: + try: + if IS_JYTHON and not isinstance(threading.current_thread(), threading._MainThread): + # we shouldn't update sys.modules for the main thread, cause it leads to the second importing 'threading' + # module, and the new instance of main thread is created + ss = JyCore.PySystemState() + # Note: Py.setSystemState() affects only the current thread. + JyCore.Py.setSystemState(ss) + + self._stop_trace() + self._on_run() + except: + if sys is not None and pydev_log_exception is not None: + pydev_log_exception() + finally: + del created_pydb_daemon[self] + + def _on_run(self): + if self._target_and_args is not None: + target, args, kwargs = self._target_and_args + target(*args, **kwargs) + else: + raise NotImplementedError('Should be reimplemented by: %s' % self.__class__) + + def do_kill_pydev_thread(self): + if not self._kill_received: + pydev_log.debug('%s received kill signal', self.name) + self._kill_received = True + + def _stop_trace(self): + if self.pydev_do_not_trace: + pydevd_tracing.SetTrace(None) # no debugging on this thread + + +def _collect_load_names(func): + found_load_names = set() + for instruction in iter_instructions(func.__code__): + if instruction.opname in ('LOAD_GLOBAL', 'LOAD_ATTR', 'LOAD_METHOD'): + found_load_names.add(instruction.argrepr) + return found_load_names + + +def _patch_threading_to_hide_pydevd_threads(): + ''' + Patches the needed functions on the `threading` module so that the pydevd threads are hidden. + + Note that we patch the functions __code__ to avoid issues if some code had already imported those + variables prior to the patching. + ''' + found_load_names = _collect_load_names(threading.enumerate) + # i.e.: we'll only apply the patching if the function seems to be what we expect. + + new_threading_enumerate = None + + if found_load_names in ( + {'_active_limbo_lock', '_limbo', '_active', 'values', 'list'}, + {'_active_limbo_lock', '_limbo', '_active', 'values', 'NULL + list'} + ): + pydev_log.debug('Applying patching to hide pydevd threads (Py3 version).') + + def new_threading_enumerate(): + with _active_limbo_lock: + ret = list(_active.values()) + list(_limbo.values()) + + return [t for t in ret if not getattr(t, 'is_pydev_daemon_thread', False)] + + elif found_load_names == set(('_active_limbo_lock', '_limbo', '_active', 'values')): + pydev_log.debug('Applying patching to hide pydevd threads (Py2 version).') + + def new_threading_enumerate(): + with _active_limbo_lock: + ret = _active.values() + _limbo.values() + + return [t for t in ret if not getattr(t, 'is_pydev_daemon_thread', False)] + + else: + pydev_log.info('Unable to hide pydevd threads. Found names in threading.enumerate: %s', found_load_names) + + if new_threading_enumerate is not None: + + def pydevd_saved_threading_enumerate(): + with threading._active_limbo_lock: + return list(threading._active.values()) + list(threading._limbo.values()) + + _pydev_saved_modules.pydevd_saved_threading_enumerate = pydevd_saved_threading_enumerate + + threading.enumerate.__code__ = new_threading_enumerate.__code__ + + # We also need to patch the active count (to match what we have in the enumerate). 
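+        # Editor's note (illustrative, not part of pydevd): both patches rely on swapping the
+        # function's __code__ object in place, which changes behavior for every reference
+        # taken before the patching (e.g. a `from threading import enumerate` done earlier).
+        # A minimal standalone sketch of that technique:
+        #
+        #     def original():
+        #         return 'original'
+        #
+        #     def replacement():
+        #         return 'patched'
+        #
+        #     alias = original                      # reference captured before patching
+        #     original.__code__ = replacement.__code__
+        #     assert alias() == 'patched'           # the old reference now runs the new code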
+ def new_active_count(): + # Note: as this will be executed in the `threading` module, `enumerate` will + # actually be threading.enumerate. + return len(enumerate()) + + threading.active_count.__code__ = new_active_count.__code__ + + # When shutting down, Python (on some versions) may do something as: + # + # def _pickSomeNonDaemonThread(): + # for t in enumerate(): + # if not t.daemon and t.is_alive(): + # return t + # return None + # + # But in this particular case, we do want threads with `is_pydev_daemon_thread` to appear + # explicitly due to the pydevd `CheckAliveThread` (because we want the shutdown to wait on it). + # So, it can't rely on the `enumerate` for that anymore as it's patched to not return pydevd threads. + if hasattr(threading, '_pickSomeNonDaemonThread'): + + def new_pick_some_non_daemon_thread(): + with _active_limbo_lock: + # Ok for py2 and py3. + threads = list(_active.values()) + list(_limbo.values()) + + for t in threads: + if not t.daemon and t.is_alive(): + return t + return None + + threading._pickSomeNonDaemonThread.__code__ = new_pick_some_non_daemon_thread.__code__ + + +_patched_threading_to_hide_pydevd_threads = False + + +def mark_as_pydevd_daemon_thread(thread): + if not IS_JYTHON and not IS_IRONPYTHON and PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS: + global _patched_threading_to_hide_pydevd_threads + if not _patched_threading_to_hide_pydevd_threads: + # When we mark the first thread as a pydevd daemon thread, we also change the threading + # functions to hide pydevd threads. + # Note: we don't just "hide" the pydevd threads from the threading module by not using it + # (i.e.: just using the `thread.start_new_thread` instead of `threading.Thread`) + # because there's 1 thread (the `CheckAliveThread`) which is a pydevd thread but + # isn't really a daemon thread (so, we need CPython to wait on it for shutdown, + # in which case it needs to be in `threading` and the patching would be needed anyways). + _patched_threading_to_hide_pydevd_threads = True + try: + _patch_threading_to_hide_pydevd_threads() + except: + pydev_log.exception('Error applying patching to hide pydevd threads.') + + thread.pydev_do_not_trace = True + thread.is_pydev_daemon_thread = True + thread.daemon = True + + +def run_as_pydevd_daemon_thread(py_db, func, *args, **kwargs): + ''' + Runs a function as a pydevd daemon thread (without any tracing in place). 
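+
+    Illustrative usage (editor's note; `poll_for_commands` is a hypothetical callable,
+    `py_db` is the active PyDB instance):
+
+        run_as_pydevd_daemon_thread(py_db, poll_for_commands, 0.5)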
+ ''' + t = PyDBDaemonThread(py_db, target_and_args=(func, args, kwargs)) + t.name = '%s (pydevd daemon thread)' % (func.__name__,) + t.start() + return t diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_defaults.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_defaults.py new file mode 120000 index 0000000..36daadb --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_defaults.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/30/fb/41/9c6bca467eb921b1077ed97463f287d18cbf8d736366ecc42fca844262 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace.py new file mode 120000 index 0000000..e8c048c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bc/e6/29/1f88bab56abe0d7f280ad5b650dbb506b23b3f1f94cac8978cb3f18d4b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py new file mode 100644 index 0000000..d37b1fc --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py @@ -0,0 +1,153 @@ +# Important: Autogenerated file. + +# DO NOT edit manually! +# DO NOT edit manually! + +LIB_FILE = 1 +PYDEV_FILE = 2 + +DONT_TRACE_DIRS = { + '_pydev_bundle': PYDEV_FILE, + '_pydev_runfiles': PYDEV_FILE, + '_pydevd_bundle': PYDEV_FILE, + '_pydevd_frame_eval': PYDEV_FILE, + 'pydev_ipython': PYDEV_FILE, + 'pydev_sitecustomize': PYDEV_FILE, + 'pydevd_attach_to_process': PYDEV_FILE, + 'pydevd_concurrency_analyser': PYDEV_FILE, + 'pydevd_plugins': PYDEV_FILE, + 'test_pydevd_reload': PYDEV_FILE, +} + +DONT_TRACE = { + # commonly used things from the stdlib that we don't want to trace + 'Queue.py':LIB_FILE, + 'queue.py':LIB_FILE, + 'socket.py':LIB_FILE, + 'weakref.py':LIB_FILE, + '_weakrefset.py':LIB_FILE, + 'linecache.py':LIB_FILE, + 'threading.py':LIB_FILE, + 'dis.py':LIB_FILE, + + # things from pydev that we don't want to trace + '__main__pydevd_gen_debug_adapter_protocol.py': PYDEV_FILE, + '_pydev_calltip_util.py': PYDEV_FILE, + '_pydev_completer.py': PYDEV_FILE, + '_pydev_execfile.py': PYDEV_FILE, + '_pydev_filesystem_encoding.py': PYDEV_FILE, + '_pydev_getopt.py': PYDEV_FILE, + '_pydev_imports_tipper.py': PYDEV_FILE, + '_pydev_jy_imports_tipper.py': PYDEV_FILE, + '_pydev_log.py': PYDEV_FILE, + '_pydev_saved_modules.py': PYDEV_FILE, + '_pydev_sys_patch.py': PYDEV_FILE, + '_pydev_tipper_common.py': PYDEV_FILE, + 'django_debug.py': PYDEV_FILE, + 'jinja2_debug.py': PYDEV_FILE, + 'pycompletionserver.py': PYDEV_FILE, + 'pydev_app_engine_debug_startup.py': PYDEV_FILE, + 'pydev_console_utils.py': PYDEV_FILE, + 'pydev_import_hook.py': PYDEV_FILE, + 'pydev_imports.py': PYDEV_FILE, + 'pydev_ipython_console.py': PYDEV_FILE, + 'pydev_ipython_console_011.py': PYDEV_FILE, + 'pydev_is_thread_alive.py': PYDEV_FILE, + 'pydev_localhost.py': PYDEV_FILE, + 'pydev_log.py': PYDEV_FILE, + 'pydev_monkey.py': PYDEV_FILE, + 'pydev_monkey_qt.py': PYDEV_FILE, + 'pydev_override.py': PYDEV_FILE, + 'pydev_run_in_console.py': PYDEV_FILE, + 'pydev_runfiles.py': PYDEV_FILE, + 'pydev_runfiles_coverage.py': 
PYDEV_FILE, + 'pydev_runfiles_nose.py': PYDEV_FILE, + 'pydev_runfiles_parallel.py': PYDEV_FILE, + 'pydev_runfiles_parallel_client.py': PYDEV_FILE, + 'pydev_runfiles_pytest2.py': PYDEV_FILE, + 'pydev_runfiles_unittest.py': PYDEV_FILE, + 'pydev_runfiles_xml_rpc.py': PYDEV_FILE, + 'pydev_umd.py': PYDEV_FILE, + 'pydev_versioncheck.py': PYDEV_FILE, + 'pydevconsole.py': PYDEV_FILE, + 'pydevconsole_code.py': PYDEV_FILE, + 'pydevd.py': PYDEV_FILE, + 'pydevd_additional_thread_info.py': PYDEV_FILE, + 'pydevd_additional_thread_info_regular.py': PYDEV_FILE, + 'pydevd_api.py': PYDEV_FILE, + 'pydevd_base_schema.py': PYDEV_FILE, + 'pydevd_breakpoints.py': PYDEV_FILE, + 'pydevd_bytecode_utils.py': PYDEV_FILE, + 'pydevd_code_to_source.py': PYDEV_FILE, + 'pydevd_collect_bytecode_info.py': PYDEV_FILE, + 'pydevd_comm.py': PYDEV_FILE, + 'pydevd_comm_constants.py': PYDEV_FILE, + 'pydevd_command_line_handling.py': PYDEV_FILE, + 'pydevd_concurrency_logger.py': PYDEV_FILE, + 'pydevd_console.py': PYDEV_FILE, + 'pydevd_constants.py': PYDEV_FILE, + 'pydevd_custom_frames.py': PYDEV_FILE, + 'pydevd_cython_wrapper.py': PYDEV_FILE, + 'pydevd_daemon_thread.py': PYDEV_FILE, + 'pydevd_defaults.py': PYDEV_FILE, + 'pydevd_dont_trace.py': PYDEV_FILE, + 'pydevd_dont_trace_files.py': PYDEV_FILE, + 'pydevd_exec2.py': PYDEV_FILE, + 'pydevd_extension_api.py': PYDEV_FILE, + 'pydevd_extension_utils.py': PYDEV_FILE, + 'pydevd_file_utils.py': PYDEV_FILE, + 'pydevd_filtering.py': PYDEV_FILE, + 'pydevd_frame.py': PYDEV_FILE, + 'pydevd_frame_eval_cython_wrapper.py': PYDEV_FILE, + 'pydevd_frame_eval_main.py': PYDEV_FILE, + 'pydevd_frame_tracing.py': PYDEV_FILE, + 'pydevd_frame_utils.py': PYDEV_FILE, + 'pydevd_gevent_integration.py': PYDEV_FILE, + 'pydevd_helpers.py': PYDEV_FILE, + 'pydevd_import_class.py': PYDEV_FILE, + 'pydevd_io.py': PYDEV_FILE, + 'pydevd_json_debug_options.py': PYDEV_FILE, + 'pydevd_line_validation.py': PYDEV_FILE, + 'pydevd_modify_bytecode.py': PYDEV_FILE, + 'pydevd_net_command.py': PYDEV_FILE, + 'pydevd_net_command_factory_json.py': PYDEV_FILE, + 'pydevd_net_command_factory_xml.py': PYDEV_FILE, + 'pydevd_plugin_numpy_types.py': PYDEV_FILE, + 'pydevd_plugin_pandas_types.py': PYDEV_FILE, + 'pydevd_plugin_utils.py': PYDEV_FILE, + 'pydevd_plugins_django_form_str.py': PYDEV_FILE, + 'pydevd_process_net_command.py': PYDEV_FILE, + 'pydevd_process_net_command_json.py': PYDEV_FILE, + 'pydevd_referrers.py': PYDEV_FILE, + 'pydevd_reload.py': PYDEV_FILE, + 'pydevd_resolver.py': PYDEV_FILE, + 'pydevd_runpy.py': PYDEV_FILE, + 'pydevd_safe_repr.py': PYDEV_FILE, + 'pydevd_save_locals.py': PYDEV_FILE, + 'pydevd_schema.py': PYDEV_FILE, + 'pydevd_schema_log.py': PYDEV_FILE, + 'pydevd_signature.py': PYDEV_FILE, + 'pydevd_source_mapping.py': PYDEV_FILE, + 'pydevd_stackless.py': PYDEV_FILE, + 'pydevd_suspended_frames.py': PYDEV_FILE, + 'pydevd_thread_lifecycle.py': PYDEV_FILE, + 'pydevd_thread_wrappers.py': PYDEV_FILE, + 'pydevd_timeout.py': PYDEV_FILE, + 'pydevd_trace_api.py': PYDEV_FILE, + 'pydevd_trace_dispatch.py': PYDEV_FILE, + 'pydevd_trace_dispatch_regular.py': PYDEV_FILE, + 'pydevd_traceproperty.py': PYDEV_FILE, + 'pydevd_tracing.py': PYDEV_FILE, + 'pydevd_utils.py': PYDEV_FILE, + 'pydevd_vars.py': PYDEV_FILE, + 'pydevd_vm_type.py': PYDEV_FILE, + 'pydevd_xml.py': PYDEV_FILE, +} + +# if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716) +DONT_TRACE['io.py'] = LIB_FILE + +# Don't trace common encodings too +DONT_TRACE['cp1252.py'] = LIB_FILE +DONT_TRACE['utf_8.py'] = LIB_FILE 
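+# Editor's note (illustrative, not part of pydevd): these tables are keyed by basename and
+# directory name; a lookup roughly follows the shape of the hypothetical helper below
+# (`classify_file` is not a real pydevd function):
+#
+#     import os.path
+#
+#     def classify_file(filename):
+#         basename = os.path.basename(filename)
+#         dirname = os.path.basename(os.path.dirname(filename))
+#         # Returns PYDEV_FILE, LIB_FILE or None (meaning: trace it normally).
+#         return DONT_TRACE.get(basename) or DONT_TRACE_DIRS.get(dirname)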
+DONT_TRACE['codecs.py'] = LIB_FILE diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_exec2.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_exec2.py new file mode 120000 index 0000000..2ab155f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_exec2.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f2/ae/9d/7041c37c913a6ddc0dcab4455dafef60e953ac7cf751506d633867249a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py new file mode 120000 index 0000000..eb20544 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b8/f4/fb/f9730d7b5b270570f37ee147e9f0463deeb162e0f9604408d6e3becef0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_utils.py new file mode 120000 index 0000000..07c55e5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0d/96/2d/e7aadf466ff8c234971bb714485aa024150abd83a7a473f12165a9411e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_filtering.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_filtering.py new file mode 100644 index 0000000..67fd6cf --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_filtering.py @@ -0,0 +1,331 @@ +import fnmatch +import glob +import os.path +import sys + +from _pydev_bundle import pydev_log +import pydevd_file_utils +import json +from collections import namedtuple +from _pydev_bundle._pydev_saved_modules import threading +from pydevd_file_utils import normcase +from _pydevd_bundle.pydevd_constants import USER_CODE_BASENAMES_STARTING_WITH, \ + LIBRARY_CODE_BASENAMES_STARTING_WITH, IS_PYPY, IS_WINDOWS +from _pydevd_bundle import pydevd_constants + +ExcludeFilter = namedtuple('ExcludeFilter', 'name, exclude, is_path') + + +def _convert_to_str_and_clear_empty(roots): + new_roots = [] + for root in roots: + assert isinstance(root, str), '%s not str (found: %s)' % (root, type(root)) + if root: + new_roots.append(root) + return new_roots + + +def _check_matches(patterns, paths): + if not patterns and not paths: + # Matched to the end. + return True + + if (not patterns and paths) or (patterns and not paths): + return False + + pattern = normcase(patterns[0]) + path = normcase(paths[0]) + + if not glob.has_magic(pattern): + + if pattern != path: + return False + + elif pattern == '**': + if len(patterns) == 1: + return True # if ** is the last one it matches anything to the right. + + for i in range(len(paths)): + # Recursively check the remaining patterns as the + # current pattern could match any number of paths. + if _check_matches(patterns[1:], paths[i:]): + return True + + elif not fnmatch.fnmatch(path, pattern): + # Current part doesn't match. 
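+        # (Editor's note, illustrative: with this recursion something like
+        #  glob_matches_path('/proj/src/mod.py', '/proj/**/*.py') is expected to return True,
+        #  because '**' lets the remaining pattern be retried against every suffix of the path,
+        #  while '/proj/*.py' would fail on the extra 'src' component.)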
+ return False + + return _check_matches(patterns[1:], paths[1:]) + + +def glob_matches_path(path, pattern, sep=os.sep, altsep=os.altsep): + if altsep: + pattern = pattern.replace(altsep, sep) + path = path.replace(altsep, sep) + + drive = '' + if len(path) > 1 and path[1] == ':': + drive, path = path[0], path[2:] + + if drive and len(pattern) > 1: + if pattern[1] == ':': + if drive.lower() != pattern[0].lower(): + return False + pattern = pattern[2:] + + patterns = pattern.split(sep) + paths = path.split(sep) + if paths: + if paths[0] == '': + paths = paths[1:] + if patterns: + if patterns[0] == '': + patterns = patterns[1:] + + return _check_matches(patterns, paths) + + +class FilesFiltering(object): + ''' + Note: calls at FilesFiltering are uncached. + + The actual API used should be through PyDB. + ''' + + def __init__(self): + self._exclude_filters = [] + self._project_roots = [] + self._library_roots = [] + + # Filter out libraries? + self._use_libraries_filter = False + self.require_module = False # True if some exclude filter filters by the module. + + self.set_use_libraries_filter(os.getenv('PYDEVD_FILTER_LIBRARIES') is not None) + + project_roots = os.getenv('IDE_PROJECT_ROOTS', None) + if project_roots is not None: + project_roots = project_roots.split(os.pathsep) + else: + project_roots = [] + self.set_project_roots(project_roots) + + library_roots = os.getenv('LIBRARY_ROOTS', None) + if library_roots is not None: + library_roots = library_roots.split(os.pathsep) + else: + library_roots = self._get_default_library_roots() + self.set_library_roots(library_roots) + + # Stepping filters. + pydevd_filters = os.getenv('PYDEVD_FILTERS', '') + # To filter out it's something as: {'**/not_my_code/**': True} + if pydevd_filters: + pydev_log.debug("PYDEVD_FILTERS %s", (pydevd_filters,)) + if pydevd_filters.startswith('{'): + # dict(glob_pattern (str) -> exclude(True or False)) + exclude_filters = [] + for key, val in json.loads(pydevd_filters).items(): + exclude_filters.append(ExcludeFilter(key, val, True)) + self._exclude_filters = exclude_filters + else: + # A ';' separated list of strings with globs for the + # list of excludes. + filters = pydevd_filters.split(';') + new_filters = [] + for new_filter in filters: + if new_filter.strip(): + new_filters.append(ExcludeFilter(new_filter.strip(), True, True)) + self._exclude_filters = new_filters + + @classmethod + def _get_default_library_roots(cls): + pydev_log.debug("Collecting default library roots.") + # Provide sensible defaults if not in env vars. + import site + + roots = [] + + try: + import sysconfig # Python 2.7 onwards only. + except ImportError: + pass + else: + for path_name in set(('stdlib', 'platstdlib', 'purelib', 'platlib')) & set(sysconfig.get_path_names()): + roots.append(sysconfig.get_path(path_name)) + + # Make sure we always get at least the standard library location (based on the `os` and + # `threading` modules -- it's a bit weird that it may be different on the ci, but it happens). + roots.append(os.path.dirname(os.__file__)) + roots.append(os.path.dirname(threading.__file__)) + if IS_PYPY: + # On PyPy 3.6 (7.3.1) it wrongly says that sysconfig.get_path('stdlib') is + # /lib-pypy when the installed version is /lib_pypy. 
+ try: + import _pypy_wait + except ImportError: + pydev_log.debug("Unable to import _pypy_wait on PyPy when collecting default library roots.") + else: + pypy_lib_dir = os.path.dirname(_pypy_wait.__file__) + pydev_log.debug("Adding %s to default library roots.", pypy_lib_dir) + roots.append(pypy_lib_dir) + + if hasattr(site, 'getusersitepackages'): + site_paths = site.getusersitepackages() + if isinstance(site_paths, (list, tuple)): + for site_path in site_paths: + roots.append(site_path) + else: + roots.append(site_paths) + + if hasattr(site, 'getsitepackages'): + site_paths = site.getsitepackages() + if isinstance(site_paths, (list, tuple)): + for site_path in site_paths: + roots.append(site_path) + else: + roots.append(site_paths) + + for path in sys.path: + if os.path.exists(path) and os.path.basename(path) in ('site-packages', 'pip-global'): + roots.append(path) + + roots.extend([os.path.realpath(path) for path in roots]) + + return sorted(set(roots)) + + def _fix_roots(self, roots): + roots = _convert_to_str_and_clear_empty(roots) + new_roots = [] + for root in roots: + path = self._absolute_normalized_path(root) + if pydevd_constants.IS_WINDOWS: + new_roots.append(path + '\\') + else: + new_roots.append(path + '/') + return new_roots + + def _absolute_normalized_path(self, filename): + ''' + Provides a version of the filename that's absolute and normalized. + ''' + return normcase(pydevd_file_utils.absolute_path(filename)) + + def set_project_roots(self, project_roots): + self._project_roots = self._fix_roots(project_roots) + pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % project_roots) + + def _get_project_roots(self): + return self._project_roots + + def set_library_roots(self, roots): + self._library_roots = self._fix_roots(roots) + pydev_log.debug("LIBRARY_ROOTS %s\n" % roots) + + def _get_library_roots(self): + return self._library_roots + + def in_project_roots(self, received_filename): + ''' + Note: don't call directly. Use PyDb.in_project_scope (there's no caching here and it doesn't + handle all possibilities for knowing whether a project is actually in the scope, it + just handles the heuristics based on the absolute_normalized_filename without the actual frame). + ''' + DEBUG = False + + if received_filename.startswith(USER_CODE_BASENAMES_STARTING_WITH): + if DEBUG: + pydev_log.debug('In in_project_roots - user basenames - starts with %s (%s)', received_filename, USER_CODE_BASENAMES_STARTING_WITH) + return True + + if received_filename.startswith(LIBRARY_CODE_BASENAMES_STARTING_WITH): + if DEBUG: + pydev_log.debug('Not in in_project_roots - library basenames - starts with %s (%s)', received_filename, LIBRARY_CODE_BASENAMES_STARTING_WITH) + return False + + project_roots = self._get_project_roots() # roots are absolute/normalized. 
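+        # Editor's note (illustrative worked example): with a project root '/home/user/proj/'
+        # and a library root '/home/user/proj/venv/', a file '/home/user/proj/venv/lib/x.py'
+        # matches both prefixes in the checks below; the longer (library) match wins, so the
+        # file is treated as library code, while '/home/user/proj/main.py' only matches the
+        # project root and is treated as user code.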
+ + absolute_normalized_filename = self._absolute_normalized_path(received_filename) + absolute_normalized_filename_as_dir = absolute_normalized_filename + ('\\' if IS_WINDOWS else '/') + + found_in_project = [] + for root in project_roots: + if root and (absolute_normalized_filename.startswith(root) or root == absolute_normalized_filename_as_dir): + if DEBUG: + pydev_log.debug('In project: %s (%s)', absolute_normalized_filename, root) + found_in_project.append(root) + + found_in_library = [] + library_roots = self._get_library_roots() + for root in library_roots: + if root and (absolute_normalized_filename.startswith(root) or root == absolute_normalized_filename_as_dir): + found_in_library.append(root) + if DEBUG: + pydev_log.debug('In library: %s (%s)', absolute_normalized_filename, root) + else: + if DEBUG: + pydev_log.debug('Not in library: %s (%s)', absolute_normalized_filename, root) + + if not project_roots: + # If we have no project roots configured, consider it being in the project + # roots if it's not found in site-packages (because we have defaults for those + # and not the other way around). + in_project = not found_in_library + if DEBUG: + pydev_log.debug('Final in project (no project roots): %s (%s)', absolute_normalized_filename, in_project) + + else: + in_project = False + if found_in_project: + if not found_in_library: + if DEBUG: + pydev_log.debug('Final in project (in_project and not found_in_library): %s (True)', absolute_normalized_filename) + in_project = True + else: + # Found in both, let's see which one has the bigger path matched. + if max(len(x) for x in found_in_project) > max(len(x) for x in found_in_library): + in_project = True + if DEBUG: + pydev_log.debug('Final in project (found in both): %s (%s)', absolute_normalized_filename, in_project) + + return in_project + + def use_libraries_filter(self): + ''' + Should we debug only what's inside project folders? + ''' + return self._use_libraries_filter + + def set_use_libraries_filter(self, use): + pydev_log.debug("pydevd: Use libraries filter: %s\n" % use) + self._use_libraries_filter = use + + def use_exclude_filters(self): + # Enabled if we have any filters registered. + return len(self._exclude_filters) > 0 + + def exclude_by_filter(self, absolute_filename, module_name): + ''' + :return: True if it should be excluded, False if it should be included and None + if no rule matched the given file. + ''' + for exclude_filter in self._exclude_filters: # : :type exclude_filter: ExcludeFilter + if exclude_filter.is_path: + if glob_matches_path(absolute_filename, exclude_filter.name): + return exclude_filter.exclude + else: + # Module filter. 
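+                # (Editor's note, illustrative: a module filter such as
+                #  ExcludeFilter('django', True, False) matches 'django' itself and any
+                #  'django.*' submodule through the startswith check below, while a path
+                #  filter such as ExcludeFilter('**/site-packages/**', True, True) is
+                #  resolved by glob_matches_path above.)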
+ if exclude_filter.name == module_name or module_name.startswith(exclude_filter.name + '.'): + return exclude_filter.exclude + return None + + def set_exclude_filters(self, exclude_filters): + ''' + :param list(ExcludeFilter) exclude_filters: + ''' + self._exclude_filters = exclude_filters + self.require_module = False + for exclude_filter in exclude_filters: + if not exclude_filter.is_path: + self.require_module = True + break diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame.py new file mode 100644 index 0000000..81a3f6f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame.py @@ -0,0 +1,1239 @@ +import linecache +import os.path +import re + +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_dont_trace +from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, + EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED, PYDEVD_IPYTHON_CONTEXT) +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace +from _pydevd_bundle.pydevd_utils import get_clsname_for_code +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +from _pydevd_bundle.pydevd_comm_constants import constant_to_str, CMD_SET_FUNCTION_BREAK +try: + from _pydevd_bundle.pydevd_bytecode_utils import get_smart_step_into_variant_from_frame_offset +except ImportError: + + def get_smart_step_into_variant_from_frame_offset(*args, **kwargs): + return None + +# IFDEF CYTHON +# cython_inline_constant: CMD_STEP_INTO = 107 +# cython_inline_constant: CMD_STEP_INTO_MY_CODE = 144 +# cython_inline_constant: CMD_STEP_RETURN = 109 +# cython_inline_constant: CMD_STEP_RETURN_MY_CODE = 160 +# cython_inline_constant: CMD_STEP_OVER = 108 +# cython_inline_constant: CMD_STEP_OVER_MY_CODE = 159 +# cython_inline_constant: CMD_STEP_CAUGHT_EXCEPTION = 137 +# cython_inline_constant: CMD_SET_BREAK = 111 +# cython_inline_constant: CMD_SMART_STEP_INTO = 128 +# cython_inline_constant: CMD_STEP_INTO_COROUTINE = 206 +# cython_inline_constant: STATE_RUN = 1 +# cython_inline_constant: STATE_SUSPEND = 2 +# ELSE +# Note: those are now inlined on cython. +CMD_STEP_INTO = 107 +CMD_STEP_INTO_MY_CODE = 144 +CMD_STEP_RETURN = 109 +CMD_STEP_RETURN_MY_CODE = 160 +CMD_STEP_OVER = 108 +CMD_STEP_OVER_MY_CODE = 159 +CMD_STEP_CAUGHT_EXCEPTION = 137 +CMD_SET_BREAK = 111 +CMD_SMART_STEP_INTO = 128 +CMD_STEP_INTO_COROUTINE = 206 +STATE_RUN = 1 +STATE_SUSPEND = 2 +# ENDIF + +basename = os.path.basename + +IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') +DEBUG_START = ('pydevd.py', 'run') +DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') +TRACE_PROPERTY = 'pydevd_traceproperty.py' + +import dis + +try: + StopAsyncIteration +except NameError: + StopAsyncIteration = StopIteration + + +# IFDEF CYTHON +# cdef is_unhandled_exception(container_obj, py_db, frame, int last_raise_line, set raise_lines): +# ELSE +def is_unhandled_exception(container_obj, py_db, frame, last_raise_line, raise_lines): +# ENDIF + if frame.f_lineno in raise_lines: + return True + + else: + try_except_infos = container_obj.try_except_infos + if try_except_infos is None: + container_obj.try_except_infos = try_except_infos = py_db.collect_try_except_info(frame.f_code) + + if not try_except_infos: + # Consider the last exception as unhandled because there's no try..except in it. 
+ return True + else: + # Now, consider only the try..except for the raise + valid_try_except_infos = [] + for try_except_info in try_except_infos: + if try_except_info.is_line_in_try_block(last_raise_line): + valid_try_except_infos.append(try_except_info) + + if not valid_try_except_infos: + return True + + else: + # Note: check all, not only the "valid" ones to cover the case + # in "tests_python.test_tracing_on_top_level.raise_unhandled10" + # where one try..except is inside the other with only a raise + # and it's gotten in the except line. + for try_except_info in try_except_infos: + if try_except_info.is_line_in_except_block(frame.f_lineno): + if ( + frame.f_lineno == try_except_info.except_line or + frame.f_lineno in try_except_info.raise_lines_in_except + ): + # In a raise inside a try..except block or some except which doesn't + # match the raised exception. + return True + return False + + +# IFDEF CYTHON +# cdef class _TryExceptContainerObj: +# cdef public list try_except_infos; +# def __init__(self): +# self.try_except_infos = None +# ELSE +class _TryExceptContainerObj(object): + ''' + A dumb container object just to containe the try..except info when needed. Meant to be + persisent among multiple PyDBFrames to the same code object. + ''' + try_except_infos = None +# ENDIF + + +#======================================================================================================================= +# PyDBFrame +#======================================================================================================================= +# IFDEF CYTHON +# cdef class PyDBFrame: +# ELSE +class PyDBFrame: + '''This makes the tracing for a given frame, so, the trace_dispatch + is used initially when we enter into a new context ('call') and then + is reused for the entire context. + ''' +# ENDIF + + # Note: class (and not instance) attributes. + + # Same thing in the main debugger but only considering the file contents, while the one in the main debugger + # considers the user input (so, the actual result must be a join of both). + filename_to_lines_where_exceptions_are_ignored = {} + filename_to_stat_info = {} + + # IFDEF CYTHON + # cdef tuple _args + # cdef int should_skip + # cdef object exc_info + # def __init__(self, tuple args): + # self._args = args # In the cython version we don't need to pass the frame + # self.should_skip = -1 # On cythonized version, put in instance. + # self.exc_info = () + # ELSE + should_skip = -1 # Default value in class (put in instance on set). + exc_info = () # Default value in class (put in instance on set). 
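+    # Editor's note (illustrative, not part of pydevd): keeping the default on the class and
+    # only writing the attribute on the instance when it changes avoids per-instance storage
+    # for the common case, e.g.:
+    #
+    #     class Example(object):
+    #         flag = -1                 # shared class-level default
+    #
+    #     e = Example()
+    #     e.flag = 1                    # only now stored on this instance, shadowing the default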
+ + def __init__(self, args): + # args = main_debugger, abs_path_canonical_path_and_base, base, info, t, frame + # yeap, much faster than putting in self and then getting it from self later on + self._args = args + # ENDIF + + def set_suspend(self, *args, **kwargs): + self._args[0].set_suspend(*args, **kwargs) + + def do_wait_suspend(self, *args, **kwargs): + self._args[0].do_wait_suspend(*args, **kwargs) + + # IFDEF CYTHON + # def trace_exception(self, frame, str event, arg): + # cdef bint should_stop; + # cdef tuple exc_info; + # ELSE + def trace_exception(self, frame, event, arg): + # ENDIF + if event == 'exception': + should_stop, frame = self._should_stop_on_exception(frame, event, arg) + + if should_stop: + if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + return self.trace_dispatch + + elif event == 'return': + exc_info = self.exc_info + if exc_info and arg is None: + frame_skips_cache, frame_cache_key = self._args[4], self._args[5] + custom_key = (frame_cache_key, 'try_exc_info') + container_obj = frame_skips_cache.get(custom_key) + if container_obj is None: + container_obj = frame_skips_cache[custom_key] = _TryExceptContainerObj() + if is_unhandled_exception(container_obj, self._args[0], frame, exc_info[1], exc_info[2]) and \ + self.handle_user_exception(frame): + return self.trace_dispatch + + return self.trace_exception + + # IFDEF CYTHON + # cdef _should_stop_on_exception(self, frame, str event, arg): + # cdef PyDBAdditionalThreadInfo info; + # cdef bint should_stop; + # cdef bint was_just_raised; + # cdef list check_excs; + # ELSE + def _should_stop_on_exception(self, frame, event, arg): + # ENDIF + + # main_debugger, _filename, info, _thread = self._args + main_debugger = self._args[0] + info = self._args[2] + should_stop = False + + # STATE_SUSPEND = 2 + if info.pydev_state != 2: # and breakpoint is not None: + exception, value, trace = arg + + if trace is not None and hasattr(trace, 'tb_next'): + # on jython trace is None on the first event and it may not have a tb_next. + + should_stop = False + exception_breakpoint = None + try: + if main_debugger.plugin is not None: + result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + if result: + should_stop, frame = result + except: + pydev_log.exception() + + if not should_stop: + # Apply checks that don't need the exception breakpoint (where we shouldn't ever stop). + if exception == SystemExit and main_debugger.ignore_system_exit_code(value): + pass + + elif exception in (GeneratorExit, StopIteration, StopAsyncIteration): + # These exceptions are control-flow related (they work as a generator + # pause), so, we shouldn't stop on them. + pass + + elif ignore_exception_trace(trace): + pass + + else: + was_just_raised = trace.tb_next is None + + # It was not handled by any plugin, lets check exception breakpoints. + check_excs = [] + + # Note: check user unhandled before regular exceptions. + exc_break_user = main_debugger.get_exception_breakpoint( + exception, main_debugger.break_on_user_uncaught_exceptions) + if exc_break_user is not None: + check_excs.append((exc_break_user, True)) + + exc_break_caught = main_debugger.get_exception_breakpoint( + exception, main_debugger.break_on_caught_exceptions) + if exc_break_caught is not None: + check_excs.append((exc_break_caught, False)) + + for exc_break, is_user_uncaught in check_excs: + # Initially mark that it should stop and then go into exclusions. 
+ should_stop = True + + if main_debugger.exclude_exception_by_filter(exc_break, trace): + pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) + should_stop = False + + elif exc_break.condition is not None and \ + not main_debugger.handle_breakpoint_condition(info, exc_break, frame): + should_stop = False + + elif is_user_uncaught: + # Note: we don't stop here, we just collect the exc_info to use later on... + should_stop = False + if not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) \ + and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)): + # User uncaught means that we're currently in user code but the code + # up the stack is library code. + exc_info = self.exc_info + if not exc_info: + exc_info = (arg, frame.f_lineno, set([frame.f_lineno])) + else: + lines = exc_info[2] + lines.add(frame.f_lineno) + exc_info = (arg, frame.f_lineno, lines) + self.exc_info = exc_info + else: + # I.e.: these are only checked if we're not dealing with user uncaught exceptions. + if exc_break.notify_on_first_raise_only and main_debugger.skip_on_exceptions_thrown_in_same_context \ + and not was_just_raised and not just_raised(trace.tb_next): + # In this case we never stop if it was just raised, so, to know if it was the first we + # need to check if we're in the 2nd method. + should_stop = False # I.e.: we stop only when we're at the caller of a method that throws an exception + + elif exc_break.notify_on_first_raise_only and not main_debugger.skip_on_exceptions_thrown_in_same_context \ + and not was_just_raised: + should_stop = False # I.e.: we stop only when it was just raised + + elif was_just_raised and main_debugger.skip_on_exceptions_thrown_in_same_context: + # Option: Don't break if an exception is caught in the same function from which it is thrown + should_stop = False + + if should_stop: + exception_breakpoint = exc_break + try: + info.pydev_message = exc_break.qname + except: + info.pydev_message = exc_break.qname.encode('utf-8') + break + + if should_stop: + # Always add exception to frame (must remove later after we proceed). 
+ add_exception_to_frame(frame, (exception, value, trace)) + + if exception_breakpoint is not None and exception_breakpoint.expression is not None: + main_debugger.handle_breakpoint_expression(exception_breakpoint, info, frame) + + return should_stop, frame + + def handle_user_exception(self, frame): + exc_info = self.exc_info + if exc_info: + return self._handle_exception(frame, 'exception', exc_info[0], EXCEPTION_TYPE_USER_UNHANDLED) + return False + + # IFDEF CYTHON + # cdef _handle_exception(self, frame, str event, arg, str exception_type): + # cdef bint stopped; + # cdef tuple abs_real_path_and_base; + # cdef str absolute_filename; + # cdef str canonical_normalized_filename; + # cdef dict filename_to_lines_where_exceptions_are_ignored; + # cdef dict lines_ignored; + # cdef dict frame_id_to_frame; + # cdef dict merged; + # cdef object trace_obj; + # cdef object main_debugger; + # ELSE + def _handle_exception(self, frame, event, arg, exception_type): + # ENDIF + stopped = False + try: + # print('_handle_exception', frame.f_lineno, frame.f_code.co_name) + + # We have 3 things in arg: exception type, description, traceback object + trace_obj = arg[2] + main_debugger = self._args[0] + + initial_trace_obj = trace_obj + if trace_obj.tb_next is None and trace_obj.tb_frame is frame: + # I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + pass + else: + # Get the trace_obj from where the exception was raised... + while trace_obj.tb_next is not None: + trace_obj = trace_obj.tb_next + + if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + for check_trace_obj in (initial_trace_obj, trace_obj): + abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame) + absolute_filename = abs_real_path_and_base[0] + canonical_normalized_filename = abs_real_path_and_base[1] + + filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored + + lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + if lines_ignored is None: + lines_ignored = filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + + try: + curr_stat = os.stat(absolute_filename) + curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + except: + curr_stat = None + + last_stat = self.filename_to_stat_info.get(absolute_filename) + if last_stat != curr_stat: + self.filename_to_stat_info[absolute_filename] = curr_stat + lines_ignored.clear() + try: + linecache.checkcache(absolute_filename) + except: + pydev_log.exception('Error in linecache.checkcache(%r)', absolute_filename) + + from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + if from_user_input: + merged = {} + merged.update(lines_ignored) + # Override what we have with the related entries that the user entered + merged.update(from_user_input) + else: + merged = lines_ignored + + exc_lineno = check_trace_obj.tb_lineno + + # print ('lines ignored', lines_ignored) + # print ('user input', from_user_input) + # print ('merged', merged, 'curr', exc_lineno) + + if exc_lineno not in merged: # Note: check on merged but update lines_ignored. 
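+                        # Editor's note (illustrative): the linecache lookup below fetches the
+                        # raising source line and matches it against IGNORE_EXCEPTION_TAG, so a
+                        # line written as
+                        #
+                        #     value = risky_call()  # @IgnoreException
+                        #
+                        # gets cached in lines_ignored and the debugger will not break on
+                        # exceptions raised from it (`risky_call` is a hypothetical name).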
+ try: + line = linecache.getline(absolute_filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + except: + pydev_log.exception('Error in linecache.getline(%r, %s, f_globals)', absolute_filename, exc_lineno) + line = '' + + if IGNORE_EXCEPTION_TAG.match(line) is not None: + lines_ignored[exc_lineno] = 1 + return False + else: + # Put in the cache saying not to ignore + lines_ignored[exc_lineno] = 0 + else: + # Ok, dict has it already cached, so, let's check it... + if merged.get(exc_lineno, 0): + return False + + thread = self._args[3] + + try: + frame_id_to_frame = {} + frame_id_to_frame[id(frame)] = frame + f = trace_obj.tb_frame + while f is not None: + frame_id_to_frame[id(f)] = f + f = f.f_back + f = None + + stopped = True + main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + try: + self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + self.do_wait_suspend(thread, frame, event, arg, exception_type=exception_type) + finally: + main_debugger.send_caught_exception_stack_proceeded(thread) + except: + pydev_log.exception() + + main_debugger.set_trace_for_frame_and_parents(frame) + finally: + # Make sure the user cannot see the '__exception__' we added after we leave the suspend state. + remove_exception_from_frame(frame) + # Clear some local variables... + frame = None + trace_obj = None + initial_trace_obj = None + check_trace_obj = None + f = None + frame_id_to_frame = None + main_debugger = None + thread = None + + return stopped + + # IFDEF CYTHON + # cdef get_func_name(self, frame): + # cdef str func_name + # ELSE + def get_func_name(self, frame): + # ENDIF + code_obj = frame.f_code + func_name = code_obj.co_name + try: + cls_name = get_clsname_for_code(code_obj, frame) + if cls_name is not None: + return "%s.%s" % (cls_name, func_name) + else: + return func_name + except: + pydev_log.exception() + return func_name + + # IFDEF CYTHON + # cdef _show_return_values(self, frame, arg): + # ELSE + def _show_return_values(self, frame, arg): + # ENDIF + try: + try: + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + if return_values_dict is None: + return_values_dict = {} + f_locals_back[RETURN_VALUES_DICT] = return_values_dict + name = self.get_func_name(frame) + return_values_dict[name] = arg + except: + pydev_log.exception() + finally: + f_locals_back = None + + # IFDEF CYTHON + # cdef _remove_return_values(self, main_debugger, frame): + # ELSE + def _remove_return_values(self, main_debugger, frame): + # ENDIF + try: + try: + # Showing return values was turned off, we should remove them from locals dict. 
+ # The values can be in the current frame or in the back one + frame.f_locals.pop(RETURN_VALUES_DICT, None) + + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + f_locals_back.pop(RETURN_VALUES_DICT, None) + except: + pydev_log.exception() + finally: + f_locals_back = None + + # IFDEF CYTHON + # cdef _get_unfiltered_back_frame(self, main_debugger, frame): + # ELSE + def _get_unfiltered_back_frame(self, main_debugger, frame): + # ENDIF + f = frame.f_back + while f is not None: + if not main_debugger.is_files_filter_enabled: + return f + + else: + if main_debugger.apply_files_filter(f, f.f_code.co_filename, False): + f = f.f_back + + else: + return f + + return f + + # IFDEF CYTHON + # cdef _is_same_frame(self, target_frame, current_frame): + # cdef PyDBAdditionalThreadInfo info; + # ELSE + def _is_same_frame(self, target_frame, current_frame): + # ENDIF + if target_frame is current_frame: + return True + + info = self._args[2] + if info.pydev_use_scoped_step_frame: + # If using scoped step we don't check the target, we just need to check + # if the current matches the same heuristic where the target was defined. + if target_frame is not None and current_frame is not None: + if target_frame.f_code.co_filename == current_frame.f_code.co_filename: + # The co_name may be different (it may include the line number), but + # the filename must still be the same. + f = current_frame.f_back + if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + f = f.f_back + if f is not None and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + return True + + return False + + # IFDEF CYTHON + # cpdef trace_dispatch(self, frame, str event, arg): + # cdef tuple abs_path_canonical_path_and_base; + # cdef bint is_exception_event; + # cdef bint has_exception_breakpoints; + # cdef bint can_skip; + # cdef bint stop; + # cdef PyDBAdditionalThreadInfo info; + # cdef int step_cmd; + # cdef int line; + # cdef bint is_line; + # cdef bint is_call; + # cdef bint is_return; + # cdef bint should_stop; + # cdef dict breakpoints_for_file; + # cdef dict stop_info; + # cdef str curr_func_name; + # cdef bint exist_result; + # cdef dict frame_skips_cache; + # cdef object frame_cache_key; + # cdef tuple line_cache_key; + # cdef int breakpoints_in_line_cache; + # cdef int breakpoints_in_frame_cache; + # cdef bint has_breakpoint_in_frame; + # cdef bint is_coroutine_or_generator; + # cdef int bp_line; + # cdef object bp; + # cdef int pydev_smart_parent_offset + # cdef int pydev_smart_child_offset + # cdef tuple pydev_smart_step_into_variants + # ELSE + def trace_dispatch(self, frame, event, arg): + # ENDIF + # Note: this is a big function because most of the logic related to hitting a breakpoint and + # stepping is contained in it. Ideally this could be split among multiple functions, but the + # problem in this case is that in pure-python function calls are expensive and even more so + # when tracing is on (because each function call will get an additional tracing call). We + # try to address this by using the info.is_tracing for the fastest possible return, but the + # cost is still high (maybe we could use code-generation in the future and make the code + # generation be better split among what each part does). 
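+        # Editor's note (illustrative, not part of pydevd): trace_dispatch follows the standard
+        # sys.settrace contract -- it receives (frame, event, arg) and whatever it returns becomes
+        # the local trace function for that frame, so returning self.trace_dispatch keeps tracing
+        # the frame while returning None (or the NO_FTRACE no-op for non-call events) drops it.
+        # A minimal standalone version of that contract:
+        #
+        #     import sys
+        #
+        #     def tracer(frame, event, arg):
+        #         print(event, frame.f_code.co_name, frame.f_lineno)
+        #         return tracer             # keep receiving 'line'/'return' events for this frame
+        #
+        #     sys.settrace(tracer)          # install as the global trace function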
+ + # DEBUG = '_debugger_case_generator.py' in frame.f_code.co_filename + main_debugger, abs_path_canonical_path_and_base, info, thread, frame_skips_cache, frame_cache_key = self._args + # if DEBUG: print('frame trace_dispatch %s %s %s %s %s %s, stop: %s' % (frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename, event, constant_to_str(info.pydev_step_cmd), arg, info.pydev_step_stop)) + try: + info.is_tracing += 1 + + # TODO: This shouldn't be needed. The fact that frame.f_lineno + # is None seems like a bug in Python 3.11. + # Reported in: https://github.com/python/cpython/issues/94485 + line = frame.f_lineno or 0 # Workaround or case where frame.f_lineno is None + line_cache_key = (frame_cache_key, line) + + if main_debugger.pydb_disposed: + return None if event == 'call' else NO_FTRACE + + plugin_manager = main_debugger.plugin + has_exception_breakpoints = ( + main_debugger.break_on_caught_exceptions + or main_debugger.break_on_user_uncaught_exceptions + or main_debugger.has_plugin_exception_breaks) + + stop_frame = info.pydev_step_stop + step_cmd = info.pydev_step_cmd + function_breakpoint_on_call_event = None + + if frame.f_code.co_flags & 0xa0: # 0xa0 == CO_GENERATOR = 0x20 | CO_COROUTINE = 0x80 + # Dealing with coroutines and generators: + # When in a coroutine we change the perceived event to the debugger because + # a call, StopIteration exception and return are usually just pausing/unpausing it. + if event == 'line': + is_line = True + is_call = False + is_return = False + is_exception_event = False + + elif event == 'return': + is_line = False + is_call = False + is_return = True + is_exception_event = False + + returns_cache_key = (frame_cache_key, 'returns') + return_lines = frame_skips_cache.get(returns_cache_key) + if return_lines is None: + # Note: we're collecting the return lines by inspecting the bytecode as + # there are multiple returns and multiple stop iterations when awaiting and + # it doesn't give any clear indication when a coroutine or generator is + # finishing or just pausing. + return_lines = set() + for x in main_debugger.collect_return_info(frame.f_code): + # Note: cython does not support closures in cpdefs (so we can't use + # a list comprehension). + return_lines.add(x.return_line) + + frame_skips_cache[returns_cache_key] = return_lines + + if line not in return_lines: + # Not really a return (coroutine/generator paused). + return self.trace_dispatch + else: + if self.exc_info: + self.handle_user_exception(frame) + return self.trace_dispatch + + # Tricky handling: usually when we're on a frame which is about to exit + # we set the step mode to step into, but in this case we'd end up in the + # asyncio internal machinery, which is not what we want, so, we just + # ask the stop frame to be a level up. + # + # Note that there's an issue here which we may want to fix in the future: if + # the back frame is a frame which is filtered, we won't stop properly. + # Solving this may not be trivial as we'd need to put a scope in the step + # in, but we may have to do it anyways to have a step in which doesn't end + # up in asyncio). + # + # Note2: we don't revert to a step in if we're doing scoped stepping + # (because on scoped stepping we're always receiving a call/line/return + # event for each line in ipython, so, we can't revert to step in on return + # as the return shouldn't mean that we've actually completed executing a + # frame in this case). 
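+                # Editor's note (illustrative): the `co_flags & 0xa0` test that opened this
+                # branch is CO_GENERATOR (0x20) | CO_COROUTINE (0x80); the same check can be
+                # written with the inspect constants, e.g.:
+                #
+                #     import inspect
+                #
+                #     def is_generator_or_coroutine(code):
+                #         return bool(code.co_flags & (inspect.CO_GENERATOR | inspect.CO_COROUTINE))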
+ if stop_frame is frame and not info.pydev_use_scoped_step_frame: + if step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE, CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE): + f = self._get_unfiltered_back_frame(main_debugger, frame) + if f is not None: + info.pydev_step_cmd = CMD_STEP_INTO_COROUTINE + info.pydev_step_stop = f + else: + if step_cmd == CMD_STEP_OVER: + info.pydev_step_cmd = CMD_STEP_INTO + info.pydev_step_stop = None + + elif step_cmd == CMD_STEP_OVER_MY_CODE: + info.pydev_step_cmd = CMD_STEP_INTO_MY_CODE + info.pydev_step_stop = None + + elif step_cmd == CMD_STEP_INTO_COROUTINE: + # We're exiting this one, so, mark the new coroutine context. + f = self._get_unfiltered_back_frame(main_debugger, frame) + if f is not None: + info.pydev_step_stop = f + else: + info.pydev_step_cmd = CMD_STEP_INTO + info.pydev_step_stop = None + + elif event == 'exception': + breakpoints_for_file = None + if has_exception_breakpoints: + should_stop, frame = self._should_stop_on_exception(frame, event, arg) + if should_stop: + if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + return self.trace_dispatch + + return self.trace_dispatch + else: + # event == 'call' or event == 'c_XXX' + return self.trace_dispatch + + else: # Not coroutine nor generator + if event == 'line': + is_line = True + is_call = False + is_return = False + is_exception_event = False + + elif event == 'return': + is_line = False + is_return = True + is_call = False + is_exception_event = False + + # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break + # eventually. Force the step mode to step into and the step stop frame to None. + # I.e.: F6 in the end of a function should stop in the next possible position (instead of forcing the user + # to make a step in or step over at that location). + # Note: this is especially troublesome when we're skipping code with the + # @DontTrace comment. + if ( + stop_frame is frame and + not info.pydev_use_scoped_step_frame and is_return and + step_cmd in (CMD_STEP_OVER, CMD_STEP_RETURN, CMD_STEP_OVER_MY_CODE, CMD_STEP_RETURN_MY_CODE, CMD_SMART_STEP_INTO) + ): + + if step_cmd in (CMD_STEP_OVER, CMD_STEP_RETURN, CMD_SMART_STEP_INTO): + info.pydev_step_cmd = CMD_STEP_INTO + else: + info.pydev_step_cmd = CMD_STEP_INTO_MY_CODE + info.pydev_step_stop = None + + if self.exc_info: + if self.handle_user_exception(frame): + return self.trace_dispatch + + elif event == 'call': + is_line = False + is_call = True + is_return = False + is_exception_event = False + if frame.f_code.co_firstlineno == frame.f_lineno: # Check line to deal with async/await. + function_breakpoint_on_call_event = main_debugger.function_breakpoint_name_to_breakpoint.get(frame.f_code.co_name) + + elif event == 'exception': + is_exception_event = True + breakpoints_for_file = None + if has_exception_breakpoints: + should_stop, frame = self._should_stop_on_exception(frame, event, arg) + if should_stop: + if self._handle_exception(frame, event, arg, EXCEPTION_TYPE_HANDLED): + return self.trace_dispatch + is_line = False + is_return = False + is_call = False + + else: + # Unexpected: just keep the same trace func (i.e.: event == 'c_XXX'). 
+ return self.trace_dispatch + + if not is_exception_event: + breakpoints_for_file = main_debugger.breakpoints.get(abs_path_canonical_path_and_base[1]) + + can_skip = False + + if info.pydev_state == 1: # STATE_RUN = 1 + # we can skip if: + # - we have no stop marked + # - we should make a step return/step over and we're not in the current frame + # - we're stepping into a coroutine context and we're not in that context + if step_cmd == -1: + can_skip = True + + elif step_cmd in (CMD_STEP_OVER, CMD_STEP_RETURN, CMD_STEP_OVER_MY_CODE, CMD_STEP_RETURN_MY_CODE) and not self._is_same_frame(stop_frame, frame): + can_skip = True + + elif step_cmd == CMD_SMART_STEP_INTO and ( + stop_frame is not None and + stop_frame is not frame and + stop_frame is not frame.f_back and + (frame.f_back is None or stop_frame is not frame.f_back.f_back)): + can_skip = True + + elif step_cmd == CMD_STEP_INTO_MY_CODE: + if ( + main_debugger.apply_files_filter(frame, frame.f_code.co_filename, True) + and (frame.f_back is None or main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True)) + ): + can_skip = True + + elif step_cmd == CMD_STEP_INTO_COROUTINE: + f = frame + while f is not None: + if self._is_same_frame(stop_frame, f): + break + f = f.f_back + else: + can_skip = True + + if can_skip: + if plugin_manager is not None and ( + main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks): + can_skip = plugin_manager.can_skip(main_debugger, frame) + + if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and self._is_same_frame(stop_frame, frame.f_back): + # trace function for showing return values after step over + can_skip = False + + # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint, + # we will return nothing for the next trace + # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, + # so, that's why the additional checks are there. + + if function_breakpoint_on_call_event: + pass # Do nothing here (just keep on going as we can't skip it). + + elif not breakpoints_for_file: + if can_skip: + if has_exception_breakpoints: + return self.trace_exception + else: + return None if is_call else NO_FTRACE + + else: + # When cached, 0 means we don't have a breakpoint and 1 means we have. + if can_skip: + breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + if breakpoints_in_line_cache == 0: + return self.trace_dispatch + + breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + if breakpoints_in_frame_cache != -1: + # Gotten from cache. + has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + + else: + has_breakpoint_in_frame = False + + try: + func_lines = set() + for offset_and_lineno in dis.findlinestarts(frame.f_code): + func_lines.add(offset_and_lineno[1]) + except: + # This is a fallback for implementations where we can't get the function + # lines -- i.e.: jython (in this case clients need to provide the function + # name to decide on the skip or we won't be able to skip the function + # completely). + + # Checks the breakpoint to see if there is a context match in some function. 
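+                            # (Editor's note, illustrative: on CPython the try-block above builds
+                            #  func_lines from dis.findlinestarts, which yields (offset, lineno)
+                            #  pairs for a code object, e.g.
+                            #      {lineno for _offset, lineno in dis.findlinestarts(frame.f_code)};
+                            #  this except-branch is the name-based fallback for runtimes where
+                            #  that information is not available.)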
+ curr_func_name = frame.f_code.co_name + + # global context is set with an empty name + if curr_func_name in ('?', '<module>', '<lambda>'): + curr_func_name = '' + + for bp in breakpoints_for_file.values(): + # will match either global or some function + if bp.func_name in ('None', curr_func_name): + has_breakpoint_in_frame = True + break + else: + for bp_line in breakpoints_for_file: # iterate on keys + if bp_line in func_lines: + has_breakpoint_in_frame = True + break + + # Cache the value (1 or 0 or -1 for default because of cython). + if has_breakpoint_in_frame: + frame_skips_cache[frame_cache_key] = 1 + else: + frame_skips_cache[frame_cache_key] = 0 + + if can_skip and not has_breakpoint_in_frame: + if has_exception_breakpoints: + return self.trace_exception + else: + return None if is_call else NO_FTRACE + + # We may have hit a breakpoint or we are already in step mode. Either way, let's check what we should do in this frame + # if DEBUG: print('NOT skipped: %s %s %s %s' % (frame.f_lineno, frame.f_code.co_name, event, frame.__class__.__name__)) + + try: + flag = False + # return is not taken into account for breakpoint hit because we'd have a double-hit in this case + # (one for the line and the other for the return). + + stop_info = {} + breakpoint = None + exist_result = False + stop = False + stop_reason = CMD_SET_BREAK + bp_type = None + + if function_breakpoint_on_call_event: + breakpoint = function_breakpoint_on_call_event + stop = True + new_frame = frame + stop_reason = CMD_SET_FUNCTION_BREAK + + elif is_line and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + breakpoint = breakpoints_for_file[line] + new_frame = frame + stop = True + if step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and (self._is_same_frame(stop_frame, frame) and is_line): + stop = False # we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + if result: + exist_result = True + flag, breakpoint, new_frame, bp_type = result + + if breakpoint: + # ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + # lets do the conditional stuff here + if breakpoint.expression is not None: + main_debugger.handle_breakpoint_expression(breakpoint, info, new_frame) + if breakpoint.is_logpoint and info.pydev_message is not None and len(info.pydev_message) > 0: + cmd = main_debugger.cmd_factory.make_io_message(info.pydev_message + os.linesep, '1') + main_debugger.writer.add_command(cmd) + + if stop or exist_result: + eval_result = False + if breakpoint.has_condition: + eval_result = main_debugger.handle_breakpoint_condition(info, breakpoint, new_frame) + + if breakpoint.has_condition: + if not eval_result: + stop = False + elif breakpoint.is_logpoint: + stop = False + + if is_call and (frame.f_code.co_name in ('<module>', '<lambda>') or (line == 1 and frame.f_code.co_name.startswith('<cell'))): + # <cell> may be executed having each line compiled as a new + # module, so it's the same case as <module>.
+ + return self.trace_dispatch + + if main_debugger.show_return_values: + if is_return and ( + (info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE, CMD_SMART_STEP_INTO) and (self._is_same_frame(stop_frame, frame.f_back))) or + (info.pydev_step_cmd in (CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE) and (self._is_same_frame(stop_frame, frame))) or + (info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_COROUTINE)) or + ( + info.pydev_step_cmd == CMD_STEP_INTO_MY_CODE + and frame.f_back is not None + and not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, True) + ) + ): + self._show_return_values(frame, arg) + + elif main_debugger.remove_return_values_flag: + try: + self._remove_return_values(main_debugger, frame) + finally: + main_debugger.remove_return_values_flag = False + + if stop: + self.set_suspend( + thread, + stop_reason, + suspend_other_threads=breakpoint and breakpoint.suspend_policy == "ALL", + ) + + elif flag and plugin_manager is not None: + result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + if result: + frame = result + + # if thread has a suspend flag, we suspend with a busy wait + if info.pydev_state == STATE_SUSPEND: + self.do_wait_suspend(thread, frame, event, arg) + return self.trace_dispatch + else: + if not breakpoint and is_line: + # No stop from anyone and no breakpoint found in line (cache that). + frame_skips_cache[line_cache_key] = 0 + + except: + pydev_log.exception() + raise + + # step handling. We stop when we hit the right frame + try: + should_skip = 0 + if pydevd_dont_trace.should_trace_hook is not None: + if self.should_skip == -1: + # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + # Which will be handled by this frame is read-only, so, we can cache it safely. + if not pydevd_dont_trace.should_trace_hook(frame, abs_path_canonical_path_and_base[0]): + # -1, 0, 1 to be Cython-friendly + should_skip = self.should_skip = 1 + else: + should_skip = self.should_skip = 0 + else: + should_skip = self.should_skip + + plugin_stop = False + if should_skip: + stop = False + + elif step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE): + force_check_project_scope = step_cmd == CMD_STEP_INTO_MY_CODE + if is_line: + if not info.pydev_use_scoped_step_frame: + if force_check_project_scope or main_debugger.is_files_filter_enabled: + stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope) + else: + stop = True + else: + # We can only stop inside the ipython call. + filename = frame.f_code.co_filename + if filename.endswith('.pyc'): + filename = filename[:-1] + + if not filename.endswith(PYDEVD_IPYTHON_CONTEXT[0]): + f = frame.f_back + while f is not None: + if f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + f2 = f.f_back + if f2 is not None and f2.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + pydev_log.debug('Stop inside ipython call') + stop = True + break + f = f.f_back + + del f + + if not stop: + # In scoped mode if step in didn't work in this context it won't work + # afterwards anyways. 
+ return None if is_call else NO_FTRACE + + elif is_return and frame.f_back is not None and not info.pydev_use_scoped_step_frame: + if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + stop = False + else: + if force_check_project_scope or main_debugger.is_files_filter_enabled: + stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope) + if stop: + # Prevent stopping in a return to the same location we were initially + # (i.e.: double-stop at the same place due to some filtering). + if info.step_in_initial_location == (frame.f_back, frame.f_back.f_lineno): + stop = False + else: + stop = True + else: + stop = False + + if stop: + if step_cmd == CMD_STEP_INTO_COROUTINE: + # i.e.: Check if we're stepping into the proper context. + f = frame + while f is not None: + if self._is_same_frame(stop_frame, f): + break + f = f.f_back + else: + stop = False + + if plugin_manager is not None: + result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE): + # Note: when dealing with a step over my code it's the same as a step over (the + # difference is that when we return from a frame in one we go to regular step + # into and in the other we go to a step into my code). + stop = self._is_same_frame(stop_frame, frame) and is_line + # Note: don't stop on a return for step over, only for line events + # i.e.: don't stop in: (stop_frame is frame.f_back and is_return) as we'd stop twice in that line. + + if plugin_manager is not None: + result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd == CMD_SMART_STEP_INTO: + stop = False + back = frame.f_back + if self._is_same_frame(stop_frame, frame) and is_return: + # We're exiting the smart step into initial frame (so, we probably didn't find our target). + stop = True + + elif self._is_same_frame(stop_frame, back) and is_line: + if info.pydev_smart_child_offset != -1: + # i.e.: in this case, we're not interested in the pause in the parent, rather + # we're interested in the pause in the child (when the parent is at the proper place). + stop = False + + else: + pydev_smart_parent_offset = info.pydev_smart_parent_offset + + pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: + # Preferred mode (when the smart step into variants are available + # and the offset is set). + stop = get_smart_step_into_variant_from_frame_offset(back.f_lasti, pydev_smart_step_into_variants) is \ + get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) + + else: + # Only the name/line is available, so, check that. + curr_func_name = frame.f_code.co_name + + # global context is set with an empty name + if curr_func_name in ('?', '') or curr_func_name is None: + curr_func_name = '' + if curr_func_name == info.pydev_func_name and stop_frame.f_lineno == info.pydev_next_line: + stop = True + + if not stop: + # In smart step into, if we didn't hit it in this frame once, that'll + # not be the case next time either, so, disable tracing for this frame. 
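Several checks above (the coroutine-context test here, the IPython scope test earlier) are the same pattern: walk frame.f_back until a frame with a given property is found. A standalone sketch of that walk:

import sys

def inside_call_named(frame, co_name):
    # Walk the caller chain looking for a frame whose code object has this name.
    f = frame
    while f is not None:
        if f.f_code.co_name == co_name:
            return True
        f = f.f_back
    return False

def inner():
    frame = sys._getframe()
    print(inside_call_named(frame, 'outer'))    # True: inner() was called from outer()
    print(inside_call_named(frame, 'missing'))  # False

def outer():
    inner()

outer()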
+ return None if is_call else NO_FTRACE + + elif back is not None and self._is_same_frame(stop_frame, back.f_back) and is_line: + # Ok, we have to track 2 stops at this point, the parent and the child offset. + # This happens when handling a step into which targets a function inside a list comprehension + # or generator (in which case an intermediary frame is created due to an internal function call). + pydev_smart_parent_offset = info.pydev_smart_parent_offset + pydev_smart_child_offset = info.pydev_smart_child_offset + # print('matched back frame', pydev_smart_parent_offset, pydev_smart_child_offset) + # print('parent f_lasti', back.f_back.f_lasti) + # print('child f_lasti', back.f_lasti) + stop = False + if pydev_smart_child_offset >= 0 and pydev_smart_child_offset >= 0: + pydev_smart_step_into_variants = info.pydev_smart_step_into_variants + + if pydev_smart_parent_offset >= 0 and pydev_smart_step_into_variants: + # Note that we don't really check the parent offset, only the offset of + # the child (because this is a generator, the parent may have moved forward + # already -- and that's ok, so, we just check that the parent frame + # matches in this case). + smart_step_into_variant = get_smart_step_into_variant_from_frame_offset(pydev_smart_parent_offset, pydev_smart_step_into_variants) + # print('matched parent offset', pydev_smart_parent_offset) + # Ok, now, check the child variant + children_variants = smart_step_into_variant.children_variants + stop = children_variants and ( + get_smart_step_into_variant_from_frame_offset(back.f_lasti, children_variants) is \ + get_smart_step_into_variant_from_frame_offset(pydev_smart_child_offset, children_variants) + ) + # print('stop at child', stop) + + if not stop: + # In smart step into, if we didn't hit it in this frame once, that'll + # not be the case next time either, so, disable tracing for this frame. + return None if is_call else NO_FTRACE + + elif step_cmd in (CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE): + stop = is_return and self._is_same_frame(stop_frame, frame) + + else: + stop = False + + if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): + f_code = getattr(frame.f_back, 'f_code', None) + if f_code is not None: + if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE: + stop = False + + if plugin_stop: + stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + elif stop: + if is_line: + self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + self.do_wait_suspend(thread, frame, event, arg) + elif is_return: # return event + back = frame.f_back + if back is not None: + # When we get to the pydevd run function, the debugging has actually finished for the main thread + # (note that it can still go on for other threads, but for this one, we just make it finish) + # So, just setting it to None should be OK + back_absolute_filename, _, base = get_abs_path_real_path_and_base_from_frame(back) + if (base, back.f_code.co_name) in (DEBUG_START, DEBUG_START_PY3K): + back = None + + elif base == TRACE_PROPERTY: + # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + # if we're in a return, we want it to appear to the user in the previous frame! 
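The smart-step-into matching above compares the caller's f_lasti (the bytecode offset currently being executed) against offsets recorded for each step-into variant; the real lookup goes through get_smart_step_into_variant_from_frame_offset. The standalone sketch below only shows where such call-site offsets come from (opcode names vary across Python versions, hence the set of spellings):

import dis

# Call opcodes across Python versions (CALL_FUNCTION/CALL_METHOD up to 3.10, CALL/CALL_KW later).
CALL_OPNAMES = {'CALL_FUNCTION', 'CALL_FUNCTION_KW', 'CALL_METHOD', 'CALL', 'CALL_KW'}

def call_site_offsets(code):
    # Bytecode offsets of the call instructions in this code object; a frame's
    # f_lasti can be compared against these to tell which call is active.
    return [ins.offset for ins in dis.get_instructions(code) if ins.opname in CALL_OPNAMES]

def caller():
    len('abc')
    print(sorted([3, 1, 2]))

print(call_site_offsets(caller.__code__))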
+ return None if is_call else NO_FTRACE + + elif pydevd_dont_trace.should_trace_hook is not None: + if not pydevd_dont_trace.should_trace_hook(back, back_absolute_filename): + # In this case, we'll have to skip the previous one because it shouldn't be traced. + # Also, we have to reset the tracing, because if the parent's parent (or some + # other parent) has to be traced and it's not currently, we wouldn't stop where + # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced). + # Related test: _debugger_case17a.py + main_debugger.set_trace_for_frame_and_parents(back) + return None if is_call else NO_FTRACE + + if back is not None: + # if we're in a return, we want it to appear to the user in the previous frame! + self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd) + self.do_wait_suspend(thread, back, event, arg) + else: + # in jython we may not have a back frame + info.pydev_step_stop = None + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_state = STATE_RUN + + except KeyboardInterrupt: + raise + except: + try: + pydev_log.exception() + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_step_stop = None + except: + return None if is_call else NO_FTRACE + + # if we are quitting, let's stop the tracing + if main_debugger.quitting: + return None if is_call else NO_FTRACE + + return self.trace_dispatch + finally: + info.is_tracing -= 1 + + # end trace_dispatch diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame_utils.py new file mode 120000 index 0000000..4c87a31 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame_utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8e/b6/ac/b8aed651b2286e402c9d005c0d4662a7a84a82823cd0c3a9a8d918a4ba \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_gevent_integration.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_gevent_integration.py new file mode 100644 index 0000000..ca404ba --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_gevent_integration.py @@ -0,0 +1,93 @@ +import pydevd_tracing +import greenlet +import gevent +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_custom_frames import add_custom_frame, update_custom_frame, remove_custom_frame +from _pydevd_bundle.pydevd_constants import GEVENT_SHOW_PAUSED_GREENLETS, get_global_debugger, \ + thread_get_ident +from _pydev_bundle import pydev_log +from pydevd_file_utils import basename + +_saved_greenlets_to_custom_frame_thread_id = {} + +if GEVENT_SHOW_PAUSED_GREENLETS: + + def _get_paused_name(py_db, g): + frame = g.gr_frame + use_frame = frame + + # i.e.: Show in the description of the greenlet the last user-code found. 
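The gevent integration below registers a callback through greenlet.settrace; for 'switch' and 'throw' events that callback receives args == (origin, target). A minimal standalone callback showing just that API shape (assumes the greenlet package is installed; it only logs switches, unlike the custom-frame bookkeeping below):

import greenlet

def trace_greenlets(event, args):
    if event in ('switch', 'throw'):
        origin, target = args
        print('%s: %r -> %r' % (event, origin, target))

previous = greenlet.settrace(trace_greenlets)  # returns the previously installed callback

def worker():
    main.switch('hello')

main = greenlet.getcurrent()
child = greenlet.greenlet(worker)
print(child.switch())  # two 'switch' events are logged, then 'hello' is printed

greenlet.settrace(previous)  # restore whatever was installed before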
+ while use_frame is not None: + if py_db.apply_files_filter(use_frame, use_frame.f_code.co_filename, True): + frame = use_frame + use_frame = use_frame.f_back + else: + break + + if use_frame is None: + use_frame = frame + + return '%s: %s - %s' % (type(g).__name__, use_frame.f_code.co_name, basename(use_frame.f_code.co_filename)) + + def greenlet_events(event, args): + if event in ('switch', 'throw'): + py_db = get_global_debugger() + origin, target = args + + if not origin.dead and origin.gr_frame is not None: + frame_custom_thread_id = _saved_greenlets_to_custom_frame_thread_id.get(origin) + if frame_custom_thread_id is None: + _saved_greenlets_to_custom_frame_thread_id[origin] = add_custom_frame( + origin.gr_frame, _get_paused_name(py_db, origin), thread_get_ident()) + else: + update_custom_frame( + frame_custom_thread_id, origin.gr_frame, _get_paused_name(py_db, origin), thread_get_ident()) + else: + frame_custom_thread_id = _saved_greenlets_to_custom_frame_thread_id.pop(origin, None) + if frame_custom_thread_id is not None: + remove_custom_frame(frame_custom_thread_id) + + # This one will be resumed, so, remove custom frame from it. + frame_custom_thread_id = _saved_greenlets_to_custom_frame_thread_id.pop(target, None) + if frame_custom_thread_id is not None: + remove_custom_frame(frame_custom_thread_id) + + # The tracing needs to be reapplied for each greenlet as gevent + # clears the tracing set through sys.settrace for each greenlet. + pydevd_tracing.reapply_settrace() + +else: + + # i.e.: no logic related to showing paused greenlets is needed. + def greenlet_events(event, args): + pydevd_tracing.reapply_settrace() + + +def enable_gevent_integration(): + # References: + # https://greenlet.readthedocs.io/en/latest/api.html#greenlet.settrace + # https://greenlet.readthedocs.io/en/latest/tracing.html + + # Note: gevent.version_info is WRONG (gevent.__version__ must be used). + try: + if tuple(int(x) for x in gevent.__version__.split('.')[:2]) <= (20, 0): + if not GEVENT_SHOW_PAUSED_GREENLETS: + return + + if not hasattr(greenlet, 'settrace'): + # In older versions it was optional. + # We still try to use if available though (because without it + pydev_log.debug('greenlet.settrace not available. 
GEVENT_SHOW_PAUSED_GREENLETS will have no effect.') + return + try: + greenlet.settrace(greenlet_events) + except: + pydev_log.exception('Error with greenlet.settrace.') + except: + pydev_log.exception('Error setting up gevent %s.', gevent.__version__) + + +def log_gevent_debug_info(): + pydev_log.debug('Greenlet version: %s', greenlet.__version__) + pydev_log.debug('Gevent version: %s', gevent.__version__) + pydev_log.debug('Gevent install location: %s', gevent.__file__) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_import_class.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_import_class.py new file mode 120000 index 0000000..aaf293b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_import_class.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5e/28/08/3186d583a22c358dec84771af91dbabc61b9c0c4c6ca0772877f2f694f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_io.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_io.py new file mode 100644 index 0000000..3682c4d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_io.py @@ -0,0 +1,258 @@ +from _pydevd_bundle.pydevd_constants import ForkSafeLock, get_global_debugger +import os +import sys +from contextlib import contextmanager + + +class IORedirector: + ''' + This class works to wrap a stream (stdout/stderr) with an additional redirect. + ''' + + def __init__(self, original, new_redirect, wrap_buffer=False): + ''' + :param stream original: + The stream to be wrapped (usually stdout/stderr, but could be None). + + :param stream new_redirect: + Usually IOBuf (below). + + :param bool wrap_buffer: + Whether to create a buffer attribute (needed to mimick python 3 s + tdout/stderr which has a buffer to write binary data). + ''' + self._lock = ForkSafeLock(rlock=True) + self._writing = False + self._redirect_to = (original, new_redirect) + if wrap_buffer and hasattr(original, 'buffer'): + self.buffer = IORedirector(original.buffer, new_redirect.buffer, False) + + def write(self, s): + # Note that writing to the original stream may fail for some reasons + # (such as trying to write something that's not a string or having it closed). + with self._lock: + if self._writing: + return + self._writing = True + try: + for r in self._redirect_to: + if hasattr(r, 'write'): + r.write(s) + finally: + self._writing = False + + def isatty(self): + for r in self._redirect_to: + if hasattr(r, 'isatty'): + return r.isatty() + return False + + def flush(self): + for r in self._redirect_to: + if hasattr(r, 'flush'): + r.flush() + + def __getattr__(self, name): + for r in self._redirect_to: + if hasattr(r, name): + return getattr(r, name) + raise AttributeError(name) + + +class RedirectToPyDBIoMessages(object): + + def __init__(self, out_ctx, wrap_stream, wrap_buffer, on_write=None): + ''' + :param out_ctx: + 1=stdout and 2=stderr + + :param wrap_stream: + Either sys.stdout or sys.stderr. + + :param bool wrap_buffer: + If True the buffer attribute (which wraps writing bytes) should be + wrapped. + + :param callable(str) on_write: + May be a custom callable to be called when to write something. + If not passed the default implementation will create an io message + and send it through the debugger. 
+ ''' + encoding = getattr(wrap_stream, 'encoding', None) + if not encoding: + encoding = os.environ.get('PYTHONIOENCODING', 'utf-8') + self.encoding = encoding + self._out_ctx = out_ctx + if wrap_buffer: + self.buffer = RedirectToPyDBIoMessages(out_ctx, wrap_stream, wrap_buffer=False, on_write=on_write) + self._on_write = on_write + + def get_pydb(self): + # Note: separate method for mocking on tests. + return get_global_debugger() + + def flush(self): + pass # no-op here + + def write(self, s): + if self._on_write is not None: + self._on_write(s) + return + + if s: + # Need s in str + if isinstance(s, bytes): + s = s.decode(self.encoding, errors='replace') + + py_db = self.get_pydb() + if py_db is not None: + # Note that the actual message contents will be a xml with utf-8, although + # the entry is str on py3 and bytes on py2. + cmd = py_db.cmd_factory.make_io_message(s, self._out_ctx) + if py_db.writer is not None: + py_db.writer.add_command(cmd) + + +class IOBuf: + '''This class works as a replacement for stdio and stderr. + It is a buffer and when its contents are requested, it will erase what + it has so far so that the next return will not return the same contents again. + ''' + + def __init__(self): + self.buflist = [] + import os + self.encoding = os.environ.get('PYTHONIOENCODING', 'utf-8') + + def getvalue(self): + b = self.buflist + self.buflist = [] # clear it + return ''.join(b) # bytes on py2, str on py3. + + def write(self, s): + if isinstance(s, bytes): + s = s.decode(self.encoding, errors='replace') + self.buflist.append(s) + + def isatty(self): + return False + + def flush(self): + pass + + def empty(self): + return len(self.buflist) == 0 + + +class _RedirectInfo(object): + + def __init__(self, original, redirect_to): + self.original = original + self.redirect_to = redirect_to + + +class _RedirectionsHolder: + _lock = ForkSafeLock(rlock=True) + _stack_stdout = [] + _stack_stderr = [] + + _pydevd_stdout_redirect_ = None + _pydevd_stderr_redirect_ = None + + +def start_redirect(keep_original_redirection=False, std='stdout', redirect_to=None): + ''' + @param std: 'stdout', 'stderr', or 'both' + ''' + with _RedirectionsHolder._lock: + if redirect_to is None: + redirect_to = IOBuf() + + if std == 'both': + config_stds = ['stdout', 'stderr'] + else: + config_stds = [std] + + for std in config_stds: + original = getattr(sys, std) + stack = getattr(_RedirectionsHolder, '_stack_%s' % std) + + if keep_original_redirection: + wrap_buffer = True if hasattr(redirect_to, 'buffer') else False + new_std_instance = IORedirector(getattr(sys, std), redirect_to, wrap_buffer=wrap_buffer) + setattr(sys, std, new_std_instance) + else: + new_std_instance = redirect_to + setattr(sys, std, redirect_to) + + stack.append(_RedirectInfo(original, new_std_instance)) + + return redirect_to + + +def end_redirect(std='stdout'): + with _RedirectionsHolder._lock: + if std == 'both': + config_stds = ['stdout', 'stderr'] + else: + config_stds = [std] + for std in config_stds: + stack = getattr(_RedirectionsHolder, '_stack_%s' % std) + redirect_info = stack.pop() + setattr(sys, std, redirect_info.original) + + +def redirect_stream_to_pydb_io_messages(std): + ''' + :param std: + 'stdout' or 'stderr' + ''' + with _RedirectionsHolder._lock: + redirect_to_name = '_pydevd_%s_redirect_' % (std,) + if getattr(_RedirectionsHolder, redirect_to_name) is None: + wrap_buffer = True + original = getattr(sys, std) + + redirect_to = RedirectToPyDBIoMessages(1 if std == 'stdout' else 2, original, wrap_buffer) + 
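IORedirector and IOBuf above implement "capture and forward": writes are appended to an in-memory buffer while still reaching the original stream. A tiny standalone tee (a hypothetical Tee class, not the classes above) showing the same idea:

import sys
from io import StringIO

class Tee:
    def __init__(self, original, captured):
        self._streams = (original, captured)

    def write(self, s):
        for stream in self._streams:
            stream.write(s)

    def flush(self):
        for stream in self._streams:
            stream.flush()

captured = StringIO()
original = sys.stdout
sys.stdout = Tee(original, captured)
try:
    print('hello from the tee')  # shown on the console and captured in memory
finally:
    sys.stdout = original

print('captured: %r' % captured.getvalue())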
start_redirect(keep_original_redirection=True, std=std, redirect_to=redirect_to) + + stack = getattr(_RedirectionsHolder, '_stack_%s' % std) + setattr(_RedirectionsHolder, redirect_to_name, stack[-1]) + return True + + return False + + +def stop_redirect_stream_to_pydb_io_messages(std): + ''' + :param std: + 'stdout' or 'stderr' + ''' + with _RedirectionsHolder._lock: + redirect_to_name = '_pydevd_%s_redirect_' % (std,) + redirect_info = getattr(_RedirectionsHolder, redirect_to_name) + if redirect_info is not None: # :type redirect_info: _RedirectInfo + setattr(_RedirectionsHolder, redirect_to_name, None) + + stack = getattr(_RedirectionsHolder, '_stack_%s' % std) + prev_info = stack.pop() + + curr = getattr(sys, std) + if curr is redirect_info.redirect_to: + setattr(sys, std, redirect_info.original) + + +@contextmanager +def redirect_stream_to_pydb_io_messages_context(): + with _RedirectionsHolder._lock: + redirecting = [] + for std in ('stdout', 'stderr'): + if redirect_stream_to_pydb_io_messages(std): + redirecting.append(std) + + try: + yield + finally: + for std in redirecting: + stop_redirect_stream_to_pydb_io_messages(std) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_json_debug_options.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_json_debug_options.py new file mode 100644 index 0000000..4923268 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_json_debug_options.py @@ -0,0 +1,196 @@ +import json +import urllib.parse as urllib_parse + + +class DebugOptions(object): + + __slots__ = [ + 'just_my_code', + 'redirect_output', + 'show_return_value', + 'break_system_exit_zero', + 'django_debug', + 'flask_debug', + 'stop_on_entry', + 'max_exception_stack_frames', + 'gui_event_loop', + ] + + def __init__(self): + self.just_my_code = True + self.redirect_output = False + self.show_return_value = False + self.break_system_exit_zero = False + self.django_debug = False + self.flask_debug = False + self.stop_on_entry = False + self.max_exception_stack_frames = 0 + self.gui_event_loop = 'matplotlib' + + def to_json(self): + dct = {} + for s in self.__slots__: + dct[s] = getattr(self, s) + return json.dumps(dct) + + def update_fom_debug_options(self, debug_options): + if 'DEBUG_STDLIB' in debug_options: + self.just_my_code = not debug_options.get('DEBUG_STDLIB') + + if 'REDIRECT_OUTPUT' in debug_options: + self.redirect_output = debug_options.get('REDIRECT_OUTPUT') + + if 'SHOW_RETURN_VALUE' in debug_options: + self.show_return_value = debug_options.get('SHOW_RETURN_VALUE') + + if 'BREAK_SYSTEMEXIT_ZERO' in debug_options: + self.break_system_exit_zero = debug_options.get('BREAK_SYSTEMEXIT_ZERO') + + if 'DJANGO_DEBUG' in debug_options: + self.django_debug = debug_options.get('DJANGO_DEBUG') + + if 'FLASK_DEBUG' in debug_options: + self.flask_debug = debug_options.get('FLASK_DEBUG') + + if 'STOP_ON_ENTRY' in debug_options: + self.stop_on_entry = debug_options.get('STOP_ON_ENTRY') + + # Note: _max_exception_stack_frames cannot be set by debug options. 
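update_fom_debug_options above consumes a dict keyed by the legacy option names; later in this module that dict is produced from a semicolon-separated "KEY=value" string. A standalone sketch of that parsing contract (re-implemented here so it runs on its own, with only two keys modeled):

def parse_debug_options(opts):
    parsers = {
        'REDIRECT_OUTPUT': lambda s: s in ('True', 'true', '1'),
        'DJANGO_DEBUG': lambda s: s in ('True', 'true', '1'),
    }
    options = {}
    for opt in (opts or '').split(';'):
        if '=' not in opt:
            continue  # malformed entries are skipped
        key, value = opt.split('=', 1)
        if key in parsers:
            options[key] = parsers[key](value)  # unknown keys are ignored
    return options

print(parse_debug_options('REDIRECT_OUTPUT=True;DJANGO_DEBUG=True;UNKNOWN=1'))
# -> {'REDIRECT_OUTPUT': True, 'DJANGO_DEBUG': True}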
+ + def update_from_args(self, args): + if 'justMyCode' in args: + self.just_my_code = bool_parser(args['justMyCode']) + else: + # i.e.: if justMyCode is provided, don't check the deprecated value + if 'debugStdLib' in args: + self.just_my_code = not bool_parser(args['debugStdLib']) + + if 'redirectOutput' in args: + self.redirect_output = bool_parser(args['redirectOutput']) + + if 'showReturnValue' in args: + self.show_return_value = bool_parser(args['showReturnValue']) + + if 'breakOnSystemExitZero' in args: + self.break_system_exit_zero = bool_parser(args['breakOnSystemExitZero']) + + if 'django' in args: + self.django_debug = bool_parser(args['django']) + + if 'flask' in args: + self.flask_debug = bool_parser(args['flask']) + + if 'jinja' in args: + self.flask_debug = bool_parser(args['jinja']) + + if 'stopOnEntry' in args: + self.stop_on_entry = bool_parser(args['stopOnEntry']) + + self.max_exception_stack_frames = int_parser(args.get('maxExceptionStackFrames', 0)) + + if 'guiEventLoop' in args: + self.gui_event_loop = str(args['guiEventLoop']) + + +def int_parser(s, default_value=0): + try: + return int(s) + except Exception: + return default_value + + +def bool_parser(s): + return s in ("True", "true", "1", True, 1) + + +def unquote(s): + return None if s is None else urllib_parse.unquote(s) + + +DEBUG_OPTIONS_PARSER = { + 'WAIT_ON_ABNORMAL_EXIT': bool_parser, + 'WAIT_ON_NORMAL_EXIT': bool_parser, + 'BREAK_SYSTEMEXIT_ZERO': bool_parser, + 'REDIRECT_OUTPUT': bool_parser, + 'DJANGO_DEBUG': bool_parser, + 'FLASK_DEBUG': bool_parser, + 'FIX_FILE_PATH_CASE': bool_parser, + 'CLIENT_OS_TYPE': unquote, + 'DEBUG_STDLIB': bool_parser, + 'STOP_ON_ENTRY': bool_parser, + 'SHOW_RETURN_VALUE': bool_parser, + 'MULTIPROCESS': bool_parser, +} + +DEBUG_OPTIONS_BY_FLAG = { + 'RedirectOutput': 'REDIRECT_OUTPUT=True', + 'WaitOnNormalExit': 'WAIT_ON_NORMAL_EXIT=True', + 'WaitOnAbnormalExit': 'WAIT_ON_ABNORMAL_EXIT=True', + 'BreakOnSystemExitZero': 'BREAK_SYSTEMEXIT_ZERO=True', + 'Django': 'DJANGO_DEBUG=True', + 'Flask': 'FLASK_DEBUG=True', + 'Jinja': 'FLASK_DEBUG=True', + 'FixFilePathCase': 'FIX_FILE_PATH_CASE=True', + 'DebugStdLib': 'DEBUG_STDLIB=True', + 'WindowsClient': 'CLIENT_OS_TYPE=WINDOWS', + 'UnixClient': 'CLIENT_OS_TYPE=UNIX', + 'StopOnEntry': 'STOP_ON_ENTRY=True', + 'ShowReturnValue': 'SHOW_RETURN_VALUE=True', + 'Multiprocess': 'MULTIPROCESS=True', +} + + +def _build_debug_options(flags): + """Build string representation of debug options from the launch config.""" + return ';'.join(DEBUG_OPTIONS_BY_FLAG[flag] + for flag in flags or [] + if flag in DEBUG_OPTIONS_BY_FLAG) + + +def _parse_debug_options(opts): + """Debug options are semicolon separated key=value pairs + """ + options = {} + if not opts: + return options + + for opt in opts.split(';'): + try: + key, value = opt.split('=') + except ValueError: + continue + try: + options[key] = DEBUG_OPTIONS_PARSER[key](value) + except KeyError: + continue + + return options + + +def _extract_debug_options(opts, flags=None): + """Return the debug options encoded in the given value. + + "opts" is a semicolon-separated string of "key=value" pairs. + "flags" is a list of strings. + + If flags is provided then it is used as a fallback. 
+ + The values come from the launch config: + + { + type:'python', + request:'launch'|'attach', + name:'friendly name for debug config', + debugOptions:[ + 'RedirectOutput', 'Django' + ], + options:'REDIRECT_OUTPUT=True;DJANGO_DEBUG=True' + } + + Further information can be found here: + + https://code.visualstudio.com/docs/editor/debugging#_launchjson-attributes + """ + if not opts: + opts = _build_debug_options(flags) + return _parse_debug_options(opts) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command.py new file mode 100644 index 0000000..506f5fd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command.py @@ -0,0 +1,146 @@ +from _pydevd_bundle.pydevd_constants import DebugInfoHolder, \ + get_global_debugger, GetGlobalDebugger, set_global_debugger # Keep for backward compatibility @UnusedImport +from _pydevd_bundle.pydevd_utils import quote_smart as quote, to_string +from _pydevd_bundle.pydevd_comm_constants import ID_TO_MEANING, CMD_EXIT +from _pydevd_bundle.pydevd_constants import HTTP_PROTOCOL, HTTP_JSON_PROTOCOL, \ + get_protocol, IS_JYTHON, ForkSafeLock +import json +from _pydev_bundle import pydev_log + + +class _BaseNetCommand(object): + + # Command id. Should be set in instance. + id = -1 + + # Dict representation of the command to be set in instance. Only set for json commands. + as_dict = None + + def send(self, *args, **kwargs): + pass + + def call_after_send(self, callback): + pass + + +class _NullNetCommand(_BaseNetCommand): + pass + + +class _NullExitCommand(_NullNetCommand): + + id = CMD_EXIT + + +# Constant meant to be passed to the writer when the command is meant to be ignored. +NULL_NET_COMMAND = _NullNetCommand() + +# Exit command -- only internal (we don't want/need to send this to the IDE). +NULL_EXIT_COMMAND = _NullExitCommand() + + +class NetCommand(_BaseNetCommand): + """ + Commands received/sent over the network. + + Command can represent command received from the debugger, + or one to be sent by daemon. + """ + next_seq = 0 # sequence numbers + + _showing_debug_info = 0 + _show_debug_info_lock = ForkSafeLock(rlock=True) + + _after_send = None + + def __init__(self, cmd_id, seq, text, is_json=False): + """ + If sequence is 0, new sequence will be generated (otherwise, this was the response + to a command from the client). 
+ """ + protocol = get_protocol() + self.id = cmd_id + if seq == 0: + NetCommand.next_seq += 2 + seq = NetCommand.next_seq + + self.seq = seq + + if is_json: + if hasattr(text, 'to_dict'): + as_dict = text.to_dict(update_ids_to_dap=True) + else: + assert isinstance(text, dict) + as_dict = text + as_dict['pydevd_cmd_id'] = cmd_id + as_dict['seq'] = seq + self.as_dict = as_dict + text = json.dumps(as_dict) + + assert isinstance(text, str) + + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + self._show_debug_info(cmd_id, seq, text) + + if is_json: + msg = text + else: + if protocol not in (HTTP_PROTOCOL, HTTP_JSON_PROTOCOL): + encoded = quote(to_string(text), '/<>_=" \t') + msg = '%s\t%s\t%s\n' % (cmd_id, seq, encoded) + + else: + msg = '%s\t%s\t%s' % (cmd_id, seq, text) + + if isinstance(msg, str): + msg = msg.encode('utf-8') + + assert isinstance(msg, bytes) + as_bytes = msg + self._as_bytes = as_bytes + + def send(self, sock): + as_bytes = self._as_bytes + try: + if get_protocol() in (HTTP_PROTOCOL, HTTP_JSON_PROTOCOL): + sock.sendall(('Content-Length: %s\r\n\r\n' % len(as_bytes)).encode('ascii')) + sock.sendall(as_bytes) + if self._after_send: + for method in self._after_send: + method(sock) + except: + if IS_JYTHON: + # Ignore errors in sock.sendall in Jython (seems to be common for Jython to + # give spurious exceptions at interpreter shutdown here). + pass + else: + raise + + def call_after_send(self, callback): + if not self._after_send: + self._after_send = [callback] + else: + self._after_send.append(callback) + + @classmethod + def _show_debug_info(cls, cmd_id, seq, text): + with cls._show_debug_info_lock: + # Only one thread each time (rlock). + if cls._showing_debug_info: + # avoid recursing in the same thread (just printing could create + # a new command when redirecting output). 
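For the HTTP-style protocols, NetCommand.send above prefixes every payload with a Content-Length header. A standalone encoder/decoder pair for that framing (in-memory bytes instead of a socket, with a made-up sample payload):

import json
from io import BytesIO

def frame_message(payload_dict):
    body = json.dumps(payload_dict).encode('utf-8')
    return ('Content-Length: %d\r\n\r\n' % len(body)).encode('ascii') + body

def read_framed_message(stream):
    # Read the header byte by byte until the blank line, then read exactly the body.
    header = b''
    while not header.endswith(b'\r\n\r\n'):
        header += stream.read(1)
    length = int(header.split(b':')[1].strip())
    return json.loads(stream.read(length).decode('utf-8'))

raw = frame_message({'seq': 2, 'type': 'event', 'event': 'output'})
print(read_framed_message(BytesIO(raw)))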
+ return + + cls._showing_debug_info += 1 + try: + out_message = 'sending cmd (%s) --> ' % (get_protocol(),) + out_message += "%20s" % ID_TO_MEANING.get(str(cmd_id), 'UNKNOWN') + out_message += ' ' + out_message += text.replace('\n', ' ') + try: + pydev_log.critical('%s\n', out_message) + except: + pass + finally: + cls._showing_debug_info -= 1 + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_json.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_json.py new file mode 100644 index 0000000..a73b189 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_json.py @@ -0,0 +1,474 @@ +from functools import partial +import itertools +import os +import sys +import socket as socket_module + +from _pydev_bundle._pydev_imports_tipper import TYPE_IMPORT, TYPE_CLASS, TYPE_FUNCTION, TYPE_ATTR, \ + TYPE_BUILTIN, TYPE_PARAM +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_bundle.pydev_override import overrides +from _pydevd_bundle._debug_adapter import pydevd_schema +from _pydevd_bundle._debug_adapter.pydevd_schema import ModuleEvent, ModuleEventBody, Module, \ + OutputEventBody, OutputEvent, ContinuedEventBody, ExitedEventBody, \ + ExitedEvent +from _pydevd_bundle.pydevd_comm_constants import CMD_THREAD_CREATE, CMD_RETURN, CMD_MODULE_EVENT, \ + CMD_WRITE_TO_CONSOLE, CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE, \ + CMD_STEP_RETURN, CMD_STEP_CAUGHT_EXCEPTION, CMD_ADD_EXCEPTION_BREAK, CMD_SET_BREAK, \ + CMD_SET_NEXT_STATEMENT, CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION, \ + CMD_THREAD_RESUME_SINGLE_NOTIFICATION, CMD_THREAD_KILL, CMD_STOP_ON_START, CMD_INPUT_REQUESTED, \ + CMD_EXIT, CMD_STEP_INTO_COROUTINE, CMD_STEP_RETURN_MY_CODE, CMD_SMART_STEP_INTO, \ + CMD_SET_FUNCTION_BREAK +from _pydevd_bundle.pydevd_constants import get_thread_id, ForkSafeLock +from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND +from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory +from _pydevd_bundle.pydevd_utils import get_non_pydevd_threads +import pydevd_file_utils +from _pydevd_bundle.pydevd_comm import build_exception_info_response +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info +from _pydevd_bundle import pydevd_frame_utils, pydevd_constants, pydevd_utils +import linecache +from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id +from io import StringIO + + +class ModulesManager(object): + + def __init__(self): + self._lock = ForkSafeLock() + self._modules = {} + self._next_id = partial(next, itertools.count(0)) + + def track_module(self, filename_in_utf8, module_name, frame): + ''' + :return list(NetCommand): + Returns a list with the module events to be sent. + ''' + if filename_in_utf8 in self._modules: + return [] + + module_events = [] + with self._lock: + # Must check again after getting the lock. + if filename_in_utf8 in self._modules: + return + + try: + version = str(frame.f_globals.get('__version__', '')) + except: + version = '' + + try: + package_name = str(frame.f_globals.get('__package__', '')) + except: + package_name = '' + + module_id = self._next_id() + + module = Module(module_id, module_name, filename_in_utf8) + if version: + module.version = version + + if package_name: + # Note: package doesn't appear in the docs but seems to be expected? 
+ module.kwargs['package'] = package_name + + module_event = ModuleEvent(ModuleEventBody('new', module)) + + module_events.append(NetCommand(CMD_MODULE_EVENT, 0, module_event, is_json=True)) + + self._modules[filename_in_utf8] = module.to_dict() + return module_events + + def get_modules_info(self): + ''' + :return list(Module) + ''' + with self._lock: + return list(self._modules.values()) + + +class NetCommandFactoryJson(NetCommandFactory): + ''' + Factory for commands which will provide messages as json (they should be + similar to the debug adapter where possible, although some differences + are currently Ok). + + Note that it currently overrides the xml version so that messages + can be done one at a time (any message not overridden will currently + use the xml version) -- after having all messages handled, it should + no longer use NetCommandFactory as the base class. + ''' + + def __init__(self): + NetCommandFactory.__init__(self) + self.modules_manager = ModulesManager() + + @overrides(NetCommandFactory.make_version_message) + def make_version_message(self, seq): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_protocol_set_message) + def make_protocol_set_message(self, seq): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_thread_created_message) + def make_thread_created_message(self, thread): + + # Note: the thread id for the debug adapter must be an int + # (make the actual id from get_thread_id respect that later on). + msg = pydevd_schema.ThreadEvent( + pydevd_schema.ThreadEventBody('started', get_thread_id(thread)), + ) + + return NetCommand(CMD_THREAD_CREATE, 0, msg, is_json=True) + + @overrides(NetCommandFactory.make_custom_frame_created_message) + def make_custom_frame_created_message(self, frame_id, frame_description): + self._additional_thread_id_to_thread_name[frame_id] = frame_description + msg = pydevd_schema.ThreadEvent( + pydevd_schema.ThreadEventBody('started', frame_id), + ) + + return NetCommand(CMD_THREAD_CREATE, 0, msg, is_json=True) + + @overrides(NetCommandFactory.make_thread_killed_message) + def make_thread_killed_message(self, tid): + self._additional_thread_id_to_thread_name.pop(tid, None) + msg = pydevd_schema.ThreadEvent( + pydevd_schema.ThreadEventBody('exited', tid), + ) + + return NetCommand(CMD_THREAD_KILL, 0, msg, is_json=True) + + @overrides(NetCommandFactory.make_list_threads_message) + def make_list_threads_message(self, py_db, seq): + threads = [] + for thread in get_non_pydevd_threads(): + if is_thread_alive(thread): + thread_id = get_thread_id(thread) + + # Notify that it's created (no-op if we already notified before). 
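ModulesManager above reports each module file once and assigns sequential ids via partial(next, itertools.count(0)). A standalone sketch of that dedup-plus-counter pattern (hypothetical SeenModules class):

import itertools
from functools import partial

class SeenModules:
    def __init__(self):
        self._next_id = partial(next, itertools.count(0))
        self._by_filename = {}

    def track(self, filename):
        # Return a fresh id the first time a file is seen, None afterwards.
        if filename in self._by_filename:
            return None
        module_id = self._next_id()
        self._by_filename[filename] = module_id
        return module_id

seen = SeenModules()
print(seen.track('/tmp/app.py'))  # 0
print(seen.track('/tmp/lib.py'))  # 1
print(seen.track('/tmp/app.py'))  # None: already reported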
+ py_db.notify_thread_created(thread_id, thread) + + thread_schema = pydevd_schema.Thread(id=thread_id, name=thread.name) + threads.append(thread_schema.to_dict()) + + for thread_id, thread_name in list(self._additional_thread_id_to_thread_name.items()): + thread_schema = pydevd_schema.Thread(id=thread_id, name=thread_name) + threads.append(thread_schema.to_dict()) + + body = pydevd_schema.ThreadsResponseBody(threads) + response = pydevd_schema.ThreadsResponse( + request_seq=seq, success=True, command='threads', body=body) + + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + @overrides(NetCommandFactory.make_get_completions_message) + def make_get_completions_message(self, seq, completions, qualifier, start): + COMPLETION_TYPE_LOOK_UP = { + TYPE_IMPORT: pydevd_schema.CompletionItemType.MODULE, + TYPE_CLASS: pydevd_schema.CompletionItemType.CLASS, + TYPE_FUNCTION: pydevd_schema.CompletionItemType.FUNCTION, + TYPE_ATTR: pydevd_schema.CompletionItemType.FIELD, + TYPE_BUILTIN: pydevd_schema.CompletionItemType.KEYWORD, + TYPE_PARAM: pydevd_schema.CompletionItemType.VARIABLE, + } + + qualifier = qualifier.lower() + qualifier_len = len(qualifier) + targets = [] + for completion in completions: + label = completion[0] + if label.lower().startswith(qualifier): + completion = pydevd_schema.CompletionItem( + label=label, type=COMPLETION_TYPE_LOOK_UP[completion[3]], start=start, length=qualifier_len) + targets.append(completion.to_dict()) + + body = pydevd_schema.CompletionsResponseBody(targets) + response = pydevd_schema.CompletionsResponse( + request_seq=seq, success=True, command='completions', body=body) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def _format_frame_name(self, fmt, initial_name, module_name, line, path): + if fmt is None: + return initial_name + frame_name = initial_name + if fmt.get('module', False): + if module_name: + if initial_name == '': + frame_name = module_name + else: + frame_name = '%s.%s' % (module_name, initial_name) + else: + basename = os.path.basename(path) + basename = basename[0:-3] if basename.lower().endswith('.py') else basename + if initial_name == '': + frame_name = '%s in %s' % (initial_name, basename) + else: + frame_name = '%s.%s' % (basename, initial_name) + + if fmt.get('line', False): + frame_name = '%s : %d' % (frame_name, line) + + return frame_name + + @overrides(NetCommandFactory.make_get_thread_stack_message) + def make_get_thread_stack_message(self, py_db, seq, thread_id, topmost_frame, fmt, must_be_suspended=False, start_frame=0, levels=0): + frames = [] + module_events = [] + + try: + # : :type suspended_frames_manager: SuspendedFramesManager + suspended_frames_manager = py_db.suspended_frames_manager + frames_list = suspended_frames_manager.get_frames_list(thread_id) + if frames_list is None: + # Could not find stack of suspended frame... + if must_be_suspended: + return None + else: + frames_list = pydevd_frame_utils.create_frames_list_from_frame(topmost_frame) + + for frame_id, frame, method_name, original_filename, filename_in_utf8, lineno, applied_mapping, show_as_current_frame in self._iter_visible_frames_info( + py_db, frames_list + ): + + try: + module_name = str(frame.f_globals.get('__name__', '')) + except: + module_name = '' + + module_events.extend(self.modules_manager.track_module(filename_in_utf8, module_name, frame)) + + presentation_hint = None + if not getattr(frame, 'IS_PLUGIN_FRAME', False): # Never filter out plugin frames! 
+ if py_db.is_files_filter_enabled and py_db.apply_files_filter(frame, original_filename, False): + continue + + if not py_db.in_project_scope(frame): + presentation_hint = 'subtle' + + formatted_name = self._format_frame_name(fmt, method_name, module_name, lineno, filename_in_utf8) + if show_as_current_frame: + formatted_name += ' (Current frame)' + source_reference = pydevd_file_utils.get_client_filename_source_reference(filename_in_utf8) + + if not source_reference and not applied_mapping and not os.path.exists(original_filename): + if getattr(frame.f_code, 'co_lnotab', None): + # Create a source-reference to be used where we provide the source by decompiling the code. + # Note: When the time comes to retrieve the source reference in this case, we'll + # check the linecache first (see: get_decompiled_source_from_frame_id). + source_reference = pydevd_file_utils.create_source_reference_for_frame_id(frame_id, original_filename) + else: + # Check if someone added a source reference to the linecache (Python attrs does this). + if linecache.getline(original_filename, 1): + source_reference = pydevd_file_utils.create_source_reference_for_linecache( + original_filename) + + frames.append(pydevd_schema.StackFrame( + frame_id, formatted_name, lineno, column=1, source={ + 'path': filename_in_utf8, + 'sourceReference': source_reference, + }, + presentationHint=presentation_hint).to_dict()) + finally: + topmost_frame = None + + for module_event in module_events: + py_db.writer.add_command(module_event) + + total_frames = len(frames) + stack_frames = frames + if bool(levels): + start = start_frame + end = min(start + levels, total_frames) + stack_frames = frames[start:end] + + response = pydevd_schema.StackTraceResponse( + request_seq=seq, + success=True, + command='stackTrace', + body=pydevd_schema.StackTraceResponseBody(stackFrames=stack_frames, totalFrames=total_frames)) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + @overrides(NetCommandFactory.make_warning_message) + def make_warning_message(self, msg): + category = 'important' + body = OutputEventBody(msg, category) + event = OutputEvent(body) + return NetCommand(CMD_WRITE_TO_CONSOLE, 0, event, is_json=True) + + @overrides(NetCommandFactory.make_io_message) + def make_io_message(self, msg, ctx): + category = 'stdout' if int(ctx) == 1 else 'stderr' + body = OutputEventBody(msg, category) + event = OutputEvent(body) + return NetCommand(CMD_WRITE_TO_CONSOLE, 0, event, is_json=True) + + _STEP_REASONS = set([ + CMD_STEP_INTO, + CMD_STEP_INTO_MY_CODE, + CMD_STEP_OVER, + CMD_STEP_OVER_MY_CODE, + CMD_STEP_RETURN, + CMD_STEP_RETURN_MY_CODE, + CMD_STEP_INTO_MY_CODE, + CMD_STOP_ON_START, + CMD_STEP_INTO_COROUTINE, + CMD_SMART_STEP_INTO, + ]) + _EXCEPTION_REASONS = set([ + CMD_STEP_CAUGHT_EXCEPTION, + CMD_ADD_EXCEPTION_BREAK, + ]) + + @overrides(NetCommandFactory.make_thread_suspend_single_notification) + def make_thread_suspend_single_notification(self, py_db, thread_id, stop_reason): + exc_desc = None + exc_name = None + thread = pydevd_find_thread_by_id(thread_id) + info = set_additional_thread_info(thread) + + preserve_focus_hint = False + if stop_reason in self._STEP_REASONS: + if info.pydev_original_step_cmd == CMD_STOP_ON_START: + + # Just to make sure that's not set as the original reason anymore. 
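_format_frame_name above decorates a frame label depending on the 'module' and 'line' flags in fmt; '<module>' is the co_name CPython gives to module-level code. A standalone sketch of those decisions with sample output:

import os

def format_frame_name(fmt, initial_name, module_name, line, path):
    frame_name = initial_name
    if fmt.get('module'):
        if module_name:
            if initial_name == '<module>':
                frame_name = module_name
            else:
                frame_name = '%s.%s' % (module_name, initial_name)
        else:
            base = os.path.basename(path)
            base = base[:-3] if base.lower().endswith('.py') else base
            if initial_name == '<module>':
                frame_name = '%s in %s' % (initial_name, base)
            else:
                frame_name = '%s.%s' % (base, initial_name)
    if fmt.get('line'):
        frame_name = '%s : %d' % (frame_name, line)
    return frame_name

print(format_frame_name({'module': True, 'line': True}, 'run', 'pkg.app', 42, '/src/app.py'))
# -> 'pkg.app.run : 42'
print(format_frame_name({'module': True}, '<module>', '', 3, '/src/app.py'))
# -> '<module> in app'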
+ info.pydev_original_step_cmd = -1 + stop_reason = 'entry' + else: + stop_reason = 'step' + elif stop_reason in self._EXCEPTION_REASONS: + stop_reason = 'exception' + elif stop_reason == CMD_SET_BREAK: + stop_reason = 'breakpoint' + elif stop_reason == CMD_SET_FUNCTION_BREAK: + stop_reason = 'function breakpoint' + elif stop_reason == CMD_SET_NEXT_STATEMENT: + stop_reason = 'goto' + else: + stop_reason = 'pause' + preserve_focus_hint = True + + if stop_reason == 'exception': + exception_info_response = build_exception_info_response( + py_db, thread_id, -1, set_additional_thread_info, self._iter_visible_frames_info, max_frames=-1) + exception_info_response + + exc_name = exception_info_response.body.exceptionId + exc_desc = exception_info_response.body.description + + body = pydevd_schema.StoppedEventBody( + reason=stop_reason, + description=exc_desc, + threadId=thread_id, + text=exc_name, + allThreadsStopped=True, + preserveFocusHint=preserve_focus_hint, + ) + event = pydevd_schema.StoppedEvent(body) + return NetCommand(CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION, 0, event, is_json=True) + + @overrides(NetCommandFactory.make_thread_resume_single_notification) + def make_thread_resume_single_notification(self, thread_id): + body = ContinuedEventBody(threadId=thread_id, allThreadsContinued=True) + event = pydevd_schema.ContinuedEvent(body) + return NetCommand(CMD_THREAD_RESUME_SINGLE_NOTIFICATION, 0, event, is_json=True) + + @overrides(NetCommandFactory.make_set_next_stmnt_status_message) + def make_set_next_stmnt_status_message(self, seq, is_success, exception_msg): + response = pydevd_schema.GotoResponse( + request_seq=int(seq), + success=is_success, + command='goto', + body={}, + message=(None if is_success else exception_msg)) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + @overrides(NetCommandFactory.make_send_curr_exception_trace_message) + def make_send_curr_exception_trace_message(self, *args, **kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_send_curr_exception_trace_proceeded_message) + def make_send_curr_exception_trace_proceeded_message(self, *args, **kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_send_breakpoint_exception_message) + def make_send_breakpoint_exception_message(self, *args, **kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_process_created_message) + def make_process_created_message(self, *args, **kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_process_about_to_be_replaced_message) + def make_process_about_to_be_replaced_message(self): + event = ExitedEvent(ExitedEventBody(-1, pydevdReason="processReplaced")) + + cmd = NetCommand(CMD_RETURN, 0, event, is_json=True) + + def after_send(socket): + socket.setsockopt(socket_module.IPPROTO_TCP, socket_module.TCP_NODELAY, 1) + + cmd.call_after_send(after_send) + return cmd + + @overrides(NetCommandFactory.make_thread_suspend_message) + def make_thread_suspend_message(self, *args, **kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_thread_run_message) + def make_thread_run_message(self, *args, **kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_reloaded_code_message) + def make_reloaded_code_message(self, *args, 
**kwargs): + return NULL_NET_COMMAND # Not a part of the debug adapter protocol + + @overrides(NetCommandFactory.make_input_requested_message) + def make_input_requested_message(self, started): + event = pydevd_schema.PydevdInputRequestedEvent(body={}) + return NetCommand(CMD_INPUT_REQUESTED, 0, event, is_json=True) + + @overrides(NetCommandFactory.make_skipped_step_in_because_of_filters) + def make_skipped_step_in_because_of_filters(self, py_db, frame): + msg = 'Frame skipped from debugging during step-in.' + if py_db.get_use_libraries_filter(): + msg += ('\nNote: may have been skipped because of "justMyCode" option (default == true). ' + 'Try setting \"justMyCode\": false in the debug configuration (e.g., launch.json).\n') + return self.make_warning_message(msg) + + @overrides(NetCommandFactory.make_evaluation_timeout_msg) + def make_evaluation_timeout_msg(self, py_db, expression, curr_thread): + msg = '''Evaluating: %s did not finish after %.2f seconds. +This may mean a number of things: +- This evaluation is really slow and this is expected. + In this case it's possible to silence this error by raising the timeout, setting the + PYDEVD_WARN_EVALUATION_TIMEOUT environment variable to a bigger value. + +- The evaluation may need other threads running while it's running: + In this case, it's possible to set the PYDEVD_UNBLOCK_THREADS_TIMEOUT + environment variable so that if after a given timeout an evaluation doesn't finish, + other threads are unblocked or you can manually resume all threads. + + Alternatively, it's also possible to skip breaking on a particular thread by setting a + `pydev_do_not_trace = True` attribute in the related threading.Thread instance + (if some thread should always be running and no breakpoints are expected to be hit in it). + +- The evaluation is deadlocked: + In this case you may set the PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT + environment variable to true so that a thread dump is shown along with this message and + optionally, set the PYDEVD_INTERRUPT_THREAD_TIMEOUT to some value so that the debugger + tries to interrupt the evaluation (if possible) when this happens. 
+''' % (expression, pydevd_constants.PYDEVD_WARN_EVALUATION_TIMEOUT) + + if pydevd_constants.PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT: + stream = StringIO() + pydevd_utils.dump_threads(stream, show_pydevd_threads=False) + msg += '\n\n%s\n' % stream.getvalue() + return self.make_warning_message(msg) + + @overrides(NetCommandFactory.make_exit_command) + def make_exit_command(self, py_db): + event = pydevd_schema.TerminatedEvent(pydevd_schema.TerminatedEventBody()) + return NetCommand(CMD_EXIT, 0, event, is_json=True) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_xml.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_xml.py new file mode 100644 index 0000000..072ba98 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_xml.py @@ -0,0 +1,493 @@ +import json + +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_bundle._pydev_saved_modules import thread +from _pydevd_bundle import pydevd_xml, pydevd_frame_utils, pydevd_constants, pydevd_utils +from _pydevd_bundle.pydevd_comm_constants import ( + CMD_THREAD_CREATE, CMD_THREAD_KILL, CMD_THREAD_SUSPEND, CMD_THREAD_RUN, CMD_GET_VARIABLE, + CMD_EVALUATE_EXPRESSION, CMD_GET_FRAME, CMD_WRITE_TO_CONSOLE, CMD_GET_COMPLETIONS, + CMD_LOAD_SOURCE, CMD_SET_NEXT_STATEMENT, CMD_EXIT, CMD_GET_FILE_CONTENTS, + CMD_EVALUATE_CONSOLE_EXPRESSION, CMD_RUN_CUSTOM_OPERATION, + CMD_GET_BREAKPOINT_EXCEPTION, CMD_SEND_CURR_EXCEPTION_TRACE, + CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED, CMD_SHOW_CONSOLE, CMD_GET_ARRAY, + CMD_INPUT_REQUESTED, CMD_GET_DESCRIPTION, CMD_PROCESS_CREATED, + CMD_SHOW_CYTHON_WARNING, CMD_LOAD_FULL_VALUE, CMD_GET_THREAD_STACK, + CMD_GET_EXCEPTION_DETAILS, CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION, + CMD_THREAD_RESUME_SINGLE_NOTIFICATION, + CMD_GET_NEXT_STATEMENT_TARGETS, CMD_VERSION, + CMD_RETURN, CMD_SET_PROTOCOL, CMD_ERROR, MAX_IO_MSG_SIZE, VERSION_STRING, + CMD_RELOAD_CODE, CMD_LOAD_SOURCE_FROM_FRAME_ID) +from _pydevd_bundle.pydevd_constants import (DebugInfoHolder, get_thread_id, + get_global_debugger, GetGlobalDebugger, set_global_debugger) # Keep for backward compatibility @UnusedImport +from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND, NULL_EXIT_COMMAND +from _pydevd_bundle.pydevd_utils import quote_smart as quote, get_non_pydevd_threads +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +import pydevd_file_utils +from pydevd_tracing import get_exception_traceback_str +from _pydev_bundle._pydev_completer import completions_to_xml +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_frame_utils import FramesList +from io import StringIO + + +#======================================================================================================================= +# NetCommandFactory +#======================================================================================================================= +class NetCommandFactory(object): + + def __init__(self): + self._additional_thread_id_to_thread_name = {} + + def _thread_to_xml(self, thread): + """ thread information as XML """ + name = pydevd_xml.make_valid_xml_value(thread.name) + cmd_text = '' % (quote(name), get_thread_id(thread)) + return cmd_text + + def make_error_message(self, seq, text): + cmd = NetCommand(CMD_ERROR, seq, text) + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2: + pydev_log.error("Error: %s" % (text,)) + 
return cmd + + def make_protocol_set_message(self, seq): + return NetCommand(CMD_SET_PROTOCOL, seq, '') + + def make_thread_created_message(self, thread): + cmdText = "" + self._thread_to_xml(thread) + "" + return NetCommand(CMD_THREAD_CREATE, 0, cmdText) + + def make_process_created_message(self): + cmdText = '' + return NetCommand(CMD_PROCESS_CREATED, 0, cmdText) + + def make_process_about_to_be_replaced_message(self): + return NULL_NET_COMMAND + + def make_show_cython_warning_message(self): + try: + return NetCommand(CMD_SHOW_CYTHON_WARNING, 0, '') + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_custom_frame_created_message(self, frame_id, frame_description): + self._additional_thread_id_to_thread_name[frame_id] = frame_description + frame_description = pydevd_xml.make_valid_xml_value(frame_description) + return NetCommand(CMD_THREAD_CREATE, 0, '' % (frame_description, frame_id)) + + def make_list_threads_message(self, py_db, seq): + """ returns thread listing as XML """ + try: + threads = get_non_pydevd_threads() + cmd_text = [""] + append = cmd_text.append + for thread in threads: + if is_thread_alive(thread): + append(self._thread_to_xml(thread)) + + for thread_id, thread_name in list(self._additional_thread_id_to_thread_name.items()): + name = pydevd_xml.make_valid_xml_value(thread_name) + append('' % (quote(name), thread_id)) + + append("") + return NetCommand(CMD_RETURN, seq, ''.join(cmd_text)) + except: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_thread_stack_message(self, py_db, seq, thread_id, topmost_frame, fmt, must_be_suspended=False, start_frame=0, levels=0): + """ + Returns thread stack as XML. + + :param must_be_suspended: If True and the thread is not suspended, returns None. + """ + try: + # If frame is None, the return is an empty frame list. + cmd_text = ['' % (thread_id,)] + + if topmost_frame is not None: + try: + # : :type suspended_frames_manager: SuspendedFramesManager + suspended_frames_manager = py_db.suspended_frames_manager + frames_list = suspended_frames_manager.get_frames_list(thread_id) + if frames_list is None: + # Could not find stack of suspended frame... + if must_be_suspended: + return None + else: + frames_list = pydevd_frame_utils.create_frames_list_from_frame(topmost_frame) + + cmd_text.append(self.make_thread_stack_str(py_db, frames_list)) + finally: + topmost_frame = None + cmd_text.append('') + return NetCommand(CMD_GET_THREAD_STACK, seq, ''.join(cmd_text)) + except: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_variable_changed_message(self, seq, payload): + # notify debugger that value was changed successfully + return NetCommand(CMD_RETURN, seq, payload) + + def make_warning_message(self, msg): + return self.make_io_message(msg, 2) + + def make_io_message(self, msg, ctx): + ''' + @param msg: the message to pass to the debug server + @param ctx: 1 for stdio 2 for stderr + ''' + try: + msg = pydevd_constants.as_str(msg) + + if len(msg) > MAX_IO_MSG_SIZE: + msg = msg[0:MAX_IO_MSG_SIZE] + msg += '...' 
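# Editor's illustrative sketch (not part of the diff): make_io_message takes the console
# text plus a ctx of 1 (stdout) or 2 (stderr), and make_warning_message simply routes
# through ctx=2; oversized payloads are cut at MAX_IO_MSG_SIZE and suffixed with '...'.
# A standalone rendering of that truncation rule, with an assumed size limit:
MAX_IO_MSG_SIZE = 1000  # assumed value for the example

def truncate_io_payload(msg, max_size=MAX_IO_MSG_SIZE):
    """Mirror of the size guard applied before the message is wrapped for the IDE."""
    if len(msg) > max_size:
        return msg[:max_size] + '...'
    return msg

print(truncate_io_payload('x' * 2000)[-6:])  # -> 'xxx...'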
+ + msg = pydevd_xml.make_valid_xml_value(quote(msg, '/>_= ')) + return NetCommand(str(CMD_WRITE_TO_CONSOLE), 0, '' % (msg, ctx)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_version_message(self, seq): + try: + return NetCommand(CMD_VERSION, seq, VERSION_STRING) + except: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_thread_killed_message(self, tid): + self._additional_thread_id_to_thread_name.pop(tid, None) + try: + return NetCommand(CMD_THREAD_KILL, 0, str(tid)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def _iter_visible_frames_info(self, py_db, frames_list): + assert frames_list.__class__ == FramesList + for frame in frames_list: + show_as_current_frame = frame is frames_list.current_frame + if frame.f_code is None: + pydev_log.info('Frame without f_code: %s', frame) + continue # IronPython sometimes does not have it! + + method_name = frame.f_code.co_name # method name (if in method) or ? if global + if method_name is None: + pydev_log.info('Frame without co_name: %s', frame) + continue # IronPython sometimes does not have it! + + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + if py_db.get_file_type(frame, abs_path_real_path_and_base) == py_db.PYDEV_FILE: + # Skip pydevd files. + frame = frame.f_back + continue + + frame_id = id(frame) + lineno = frames_list.frame_id_to_lineno.get(frame_id, frame.f_lineno) + + filename_in_utf8, lineno, changed = py_db.source_mapping.map_to_client(abs_path_real_path_and_base[0], lineno) + new_filename_in_utf8, applied_mapping = pydevd_file_utils.map_file_to_client(filename_in_utf8) + applied_mapping = applied_mapping or changed + + yield frame_id, frame, method_name, abs_path_real_path_and_base[0], new_filename_in_utf8, lineno, applied_mapping, show_as_current_frame + + def make_thread_stack_str(self, py_db, frames_list): + assert frames_list.__class__ == FramesList + make_valid_xml_value = pydevd_xml.make_valid_xml_value + cmd_text_list = [] + append = cmd_text_list.append + + try: + for frame_id, frame, method_name, _original_filename, filename_in_utf8, lineno, _applied_mapping, _show_as_current_frame in self._iter_visible_frames_info( + py_db, frames_list + ): + + # print("file is ", filename_in_utf8) + # print("line is ", lineno) + + # Note: variables are all gotten 'on-demand'. + append('' % (quote(make_valid_xml_value(filename_in_utf8), '/>_= \t'), lineno)) + append("") + except: + pydev_log.exception() + + return ''.join(cmd_text_list) + + def make_thread_suspend_str( + self, + py_db, + thread_id, + frames_list, + stop_reason=None, + message=None, + suspend_type="trace", + ): + """ + :return tuple(str,str): + Returns tuple(thread_suspended_str, thread_stack_str). 
+ + i.e.: + ( + ''' + + + + + + + ''' + , + ''' + + + ''' + ) + """ + assert frames_list.__class__ == FramesList + make_valid_xml_value = pydevd_xml.make_valid_xml_value + cmd_text_list = [] + append = cmd_text_list.append + + cmd_text_list.append('') + if message: + message = make_valid_xml_value(message) + + append('') + thread_stack_str = self.make_thread_stack_str(py_db, frames_list) + append(thread_stack_str) + append("") + + return ''.join(cmd_text_list), thread_stack_str + + def make_thread_suspend_message(self, py_db, thread_id, frames_list, stop_reason, message, suspend_type): + try: + thread_suspend_str, thread_stack_str = self.make_thread_suspend_str( + py_db, thread_id, frames_list, stop_reason, message, suspend_type) + cmd = NetCommand(CMD_THREAD_SUSPEND, 0, thread_suspend_str) + cmd.thread_stack_str = thread_stack_str + cmd.thread_suspend_str = thread_suspend_str + return cmd + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_thread_suspend_single_notification(self, py_db, thread_id, stop_reason): + try: + return NetCommand(CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION, 0, json.dumps( + {'thread_id': thread_id, 'stop_reason':stop_reason})) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_thread_resume_single_notification(self, thread_id): + try: + return NetCommand(CMD_THREAD_RESUME_SINGLE_NOTIFICATION, 0, json.dumps( + {'thread_id': thread_id})) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_thread_run_message(self, thread_id, reason): + try: + return NetCommand(CMD_THREAD_RUN, 0, "%s\t%s" % (thread_id, reason)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_get_variable_message(self, seq, payload): + try: + return NetCommand(CMD_GET_VARIABLE, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_array_message(self, seq, payload): + try: + return NetCommand(CMD_GET_ARRAY, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_description_message(self, seq, payload): + try: + return NetCommand(CMD_GET_DESCRIPTION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_frame_message(self, seq, payload): + try: + return NetCommand(CMD_GET_FRAME, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_evaluate_expression_message(self, seq, payload): + try: + return NetCommand(CMD_EVALUATE_EXPRESSION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_completions_message(self, seq, completions, qualifier, start): + try: + payload = completions_to_xml(completions) + return NetCommand(CMD_GET_COMPLETIONS, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_file_contents(self, seq, payload): + try: + return NetCommand(CMD_GET_FILE_CONTENTS, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_reloaded_code_message(self, seq, reloaded_ok): + try: + return NetCommand(CMD_RELOAD_CODE, seq, '' % reloaded_ok) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_send_breakpoint_exception_message(self, seq, payload): + try: + return 
NetCommand(CMD_GET_BREAKPOINT_EXCEPTION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def _make_send_curr_exception_trace_str(self, py_db, thread_id, exc_type, exc_desc, trace_obj): + frames_list = pydevd_frame_utils.create_frames_list_from_traceback(trace_obj, None, exc_type, exc_desc) + + exc_type = pydevd_xml.make_valid_xml_value(str(exc_type)).replace('\t', ' ') or 'exception: type unknown' + exc_desc = pydevd_xml.make_valid_xml_value(str(exc_desc)).replace('\t', ' ') or 'exception: no description' + + thread_suspend_str, thread_stack_str = self.make_thread_suspend_str( + py_db, thread_id, frames_list, CMD_SEND_CURR_EXCEPTION_TRACE, '') + return exc_type, exc_desc, thread_suspend_str, thread_stack_str + + def make_send_curr_exception_trace_message(self, py_db, seq, thread_id, curr_frame_id, exc_type, exc_desc, trace_obj): + try: + exc_type, exc_desc, thread_suspend_str, _thread_stack_str = self._make_send_curr_exception_trace_str( + py_db, thread_id, exc_type, exc_desc, trace_obj) + payload = str(curr_frame_id) + '\t' + exc_type + "\t" + exc_desc + "\t" + thread_suspend_str + return NetCommand(CMD_SEND_CURR_EXCEPTION_TRACE, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_exception_details_message(self, py_db, seq, thread_id, topmost_frame): + """Returns exception details as XML """ + try: + # If the debugger is not suspended, just return the thread and its id. + cmd_text = ['') + cmd_text.append(thread_stack_str) + break + frame = frame.f_back + else: + cmd_text.append('>') + finally: + frame = None + cmd_text.append('') + return NetCommand(CMD_GET_EXCEPTION_DETAILS, seq, ''.join(cmd_text)) + except: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_send_curr_exception_trace_proceeded_message(self, seq, thread_id): + try: + return NetCommand(CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED, 0, str(thread_id)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_send_console_message(self, seq, payload): + try: + return NetCommand(CMD_EVALUATE_CONSOLE_EXPRESSION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_custom_operation_message(self, seq, payload): + try: + return NetCommand(CMD_RUN_CUSTOM_OPERATION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_load_source_message(self, seq, source): + return NetCommand(CMD_LOAD_SOURCE, seq, source) + + def make_load_source_from_frame_id_message(self, seq, source): + return NetCommand(CMD_LOAD_SOURCE_FROM_FRAME_ID, seq, source) + + def make_show_console_message(self, py_db, thread_id, frame): + try: + frames_list = pydevd_frame_utils.create_frames_list_from_frame(frame) + thread_suspended_str, _thread_stack_str = self.make_thread_suspend_str( + py_db, thread_id, frames_list, CMD_SHOW_CONSOLE, '') + return NetCommand(CMD_SHOW_CONSOLE, 0, thread_suspended_str) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_input_requested_message(self, started): + try: + return NetCommand(CMD_INPUT_REQUESTED, 0, str(started)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_set_next_stmnt_status_message(self, seq, is_success, exception_msg): + try: + message = str(is_success) + '\t' + exception_msg + return NetCommand(CMD_SET_NEXT_STATEMENT, int(seq), 
message) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_load_full_value_message(self, seq, payload): + try: + return NetCommand(CMD_LOAD_FULL_VALUE, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_next_statement_targets_message(self, seq, payload): + try: + return NetCommand(CMD_GET_NEXT_STATEMENT_TARGETS, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_skipped_step_in_because_of_filters(self, py_db, frame): + return NULL_NET_COMMAND # Not a part of the xml protocol + + def make_evaluation_timeout_msg(self, py_db, expression, thread): + msg = '''pydevd: Evaluating: %s did not finish after %.2f seconds. +This may mean a number of things: +- This evaluation is really slow and this is expected. + In this case it's possible to silence this error by raising the timeout, setting the + PYDEVD_WARN_EVALUATION_TIMEOUT environment variable to a bigger value. + +- The evaluation may need other threads running while it's running: + In this case, you may need to manually let other paused threads continue. + + Alternatively, it's also possible to skip breaking on a particular thread by setting a + `pydev_do_not_trace = True` attribute in the related threading.Thread instance + (if some thread should always be running and no breakpoints are expected to be hit in it). + +- The evaluation is deadlocked: + In this case you may set the PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT + environment variable to true so that a thread dump is shown along with this message and + optionally, set the PYDEVD_INTERRUPT_THREAD_TIMEOUT to some value so that the debugger + tries to interrupt the evaluation (if possible) when this happens. 
+''' % (expression, pydevd_constants.PYDEVD_WARN_EVALUATION_TIMEOUT) + + if pydevd_constants.PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT: + stream = StringIO() + pydevd_utils.dump_threads(stream, show_pydevd_threads=False) + msg += '\n\n%s\n' % stream.getvalue() + return self.make_warning_message(msg) + + def make_exit_command(self, py_db): + return NULL_EXIT_COMMAND diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_plugin_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_plugin_utils.py new file mode 120000 index 0000000..c1d5713 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_plugin_utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e5/ac/c8/b753067ed2b007b639613b6f18f0bd6a042def1336df38ef2aa1a37a28 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command.py new file mode 100644 index 0000000..e3afdcd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command.py @@ -0,0 +1,757 @@ +import json +import os +import sys +import traceback + +from _pydev_bundle import pydev_log +from _pydev_bundle.pydev_log import exception as pydev_log_exception +from _pydevd_bundle import pydevd_traceproperty, pydevd_dont_trace, pydevd_utils +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info +from _pydevd_bundle.pydevd_breakpoints import get_exception_class +from _pydevd_bundle.pydevd_comm import ( + InternalEvaluateConsoleExpression, InternalConsoleGetCompletions, InternalRunCustomOperation, + internal_get_next_statement_targets, internal_get_smart_step_into_variants) +from _pydevd_bundle.pydevd_constants import NEXT_VALUE_SEPARATOR, IS_WINDOWS, NULL +from _pydevd_bundle.pydevd_comm_constants import ID_TO_MEANING, CMD_EXEC_EXPRESSION, CMD_AUTHENTICATE +from _pydevd_bundle.pydevd_api import PyDevdAPI +from io import StringIO +from _pydevd_bundle.pydevd_net_command import NetCommand +from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id +import pydevd_file_utils + + +class _PyDevCommandProcessor(object): + + def __init__(self): + self.api = PyDevdAPI() + + def process_net_command(self, py_db, cmd_id, seq, text): + '''Processes a command received from the Java side + + @param cmd_id: the id of the command + @param seq: the sequence of the command + @param text: the text received in the command + ''' + + # We can only proceed if the client is already authenticated or if it's the + # command to authenticate. + if cmd_id != CMD_AUTHENTICATE and not py_db.authentication.is_authenticated(): + cmd = py_db.cmd_factory.make_error_message(seq, 'Client not authenticated.') + py_db.writer.add_command(cmd) + return + + meaning = ID_TO_MEANING[str(cmd_id)] + + # print('Handling %s (%s)' % (meaning, text)) + + method_name = meaning.lower() + + on_command = getattr(self, method_name.lower(), None) + if on_command is None: + # I have no idea what this is all about + cmd = py_db.cmd_factory.make_error_message(seq, "unexpected command " + str(cmd_id)) + py_db.writer.add_command(cmd) + return + + lock = py_db._main_lock + if method_name == 'cmd_thread_dump_to_stderr': + # We can skip the main debugger locks for cases where we know it's not needed. 
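# Editor's illustrative sketch (not part of the diff): process_net_command above resolves
# its handler by mapping the command id to its meaning (ID_TO_MEANING), lower-casing it and
# looking up a method of that name, e.g. CMD_VERSION -> "cmd_version" -> self.cmd_version.
# A minimal standalone version of that dispatch pattern, with an assumed id/meaning subset:
ID_TO_MEANING = {'501': 'CMD_VERSION', '101': 'CMD_RUN'}  # assumed subset for the example

class MiniProcessor:
    def cmd_version(self, seq, text):
        return ('version-reply', seq)

    def cmd_run(self, seq, text):
        return ('run-ack', seq)

    def dispatch(self, cmd_id, seq, text):
        method_name = ID_TO_MEANING[str(cmd_id)].lower()
        handler = getattr(self, method_name, None)
        if handler is None:
            return ('error', seq)  # unexpected command
        return handler(seq, text)

print(MiniProcessor().dispatch(501, 1, ''))  # -> ('version-reply', 1)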
+ lock = NULL + + with lock: + try: + cmd = on_command(py_db, cmd_id, seq, text) + if cmd is not None: + py_db.writer.add_command(cmd) + except: + if traceback is not None and sys is not None and pydev_log_exception is not None: + pydev_log_exception() + + stream = StringIO() + traceback.print_exc(file=stream) + cmd = py_db.cmd_factory.make_error_message( + seq, + "Unexpected exception in process_net_command.\nInitial params: %s. Exception: %s" % ( + ((cmd_id, seq, text), stream.getvalue()) + ) + ) + if cmd is not None: + py_db.writer.add_command(cmd) + + def cmd_authenticate(self, py_db, cmd_id, seq, text): + access_token = text + py_db.authentication.login(access_token) + if py_db.authentication.is_authenticated(): + return NetCommand(cmd_id, seq, py_db.authentication.client_access_token) + + return py_db.cmd_factory.make_error_message(seq, 'Client not authenticated.') + + def cmd_run(self, py_db, cmd_id, seq, text): + return self.api.run(py_db) + + def cmd_list_threads(self, py_db, cmd_id, seq, text): + return self.api.list_threads(py_db, seq) + + def cmd_get_completions(self, py_db, cmd_id, seq, text): + # we received some command to get a variable + # the text is: thread_id\tframe_id\tactivation token + thread_id, frame_id, _scope, act_tok = text.split('\t', 3) + + return self.api.request_completions(py_db, seq, thread_id, frame_id, act_tok) + + def cmd_get_thread_stack(self, py_db, cmd_id, seq, text): + # Receives a thread_id and a given timeout, which is the time we should + # wait to the provide the stack if a given thread is still not suspended. + if '\t' in text: + thread_id, timeout = text.split('\t') + timeout = float(timeout) + else: + thread_id = text + timeout = .5 # Default timeout is .5 seconds + + return self.api.request_stack(py_db, seq, thread_id, fmt={}, timeout=timeout) + + def cmd_set_protocol(self, py_db, cmd_id, seq, text): + return self.api.set_protocol(py_db, seq, text.strip()) + + def cmd_thread_suspend(self, py_db, cmd_id, seq, text): + return self.api.request_suspend_thread(py_db, text.strip()) + + def cmd_version(self, py_db, cmd_id, seq, text): + # Default based on server process (although ideally the IDE should + # provide it). + if IS_WINDOWS: + ide_os = 'WINDOWS' + else: + ide_os = 'UNIX' + + # Breakpoints can be grouped by 'LINE' or by 'ID'. + breakpoints_by = 'LINE' + + splitted = text.split('\t') + if len(splitted) == 1: + _local_version = splitted + + elif len(splitted) == 2: + _local_version, ide_os = splitted + + elif len(splitted) == 3: + _local_version, ide_os, breakpoints_by = splitted + + version_msg = self.api.set_ide_os_and_breakpoints_by(py_db, seq, ide_os, breakpoints_by) + + # Enable thread notifications after the version command is completed. 
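# Editor's illustrative sketch (not part of the diff): cmd_version above accepts one to
# three tab-separated fields -- local version, IDE OS ('WINDOWS'/'UNIX') and how breakpoints
# are keyed ('LINE' or 'ID'); in the real handler the OS default comes from the server
# platform. The payload below is an assumed example, not captured from a real session.
def parse_version_payload(text):
    ide_os, breakpoints_by = 'UNIX', 'LINE'   # defaults for missing fields
    splitted = text.split('\t')
    if len(splitted) == 2:
        _local_version, ide_os = splitted
    elif len(splitted) == 3:
        _local_version, ide_os, breakpoints_by = splitted
    return ide_os, breakpoints_by

print(parse_version_payload('2.0\tWINDOWS\tID'))  # -> ('WINDOWS', 'ID')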
+ self.api.set_enable_thread_notifications(py_db, True) + + return version_msg + + def cmd_thread_run(self, py_db, cmd_id, seq, text): + return self.api.request_resume_thread(text.strip()) + + def _cmd_step(self, py_db, cmd_id, seq, text): + return self.api.request_step(py_db, text.strip(), cmd_id) + + cmd_step_into = _cmd_step + cmd_step_into_my_code = _cmd_step + cmd_step_over = _cmd_step + cmd_step_over_my_code = _cmd_step + cmd_step_return = _cmd_step + cmd_step_return_my_code = _cmd_step + + def _cmd_set_next(self, py_db, cmd_id, seq, text): + thread_id, line, func_name = text.split('\t', 2) + return self.api.request_set_next(py_db, seq, thread_id, cmd_id, None, line, func_name) + + cmd_run_to_line = _cmd_set_next + cmd_set_next_statement = _cmd_set_next + + def cmd_smart_step_into(self, py_db, cmd_id, seq, text): + thread_id, line_or_bytecode_offset, func_name = text.split('\t', 2) + if line_or_bytecode_offset.startswith('offset='): + # In this case we request the smart step into to stop given the parent frame + # and the location of the parent frame bytecode offset and not just the func_name + # (this implies that `CMD_GET_SMART_STEP_INTO_VARIANTS` was previously used + # to know what are the valid stop points). + + temp = line_or_bytecode_offset[len('offset='):] + if ';' in temp: + offset, child_offset = temp.split(';') + offset = int(offset) + child_offset = int(child_offset) + else: + child_offset = -1 + offset = int(temp) + return self.api.request_smart_step_into(py_db, seq, thread_id, offset, child_offset) + else: + # If the offset wasn't passed, just use the line/func_name to do the stop. + return self.api.request_smart_step_into_by_func_name(py_db, seq, thread_id, line_or_bytecode_offset, func_name) + + def cmd_reload_code(self, py_db, cmd_id, seq, text): + text = text.strip() + if '\t' not in text: + module_name = text.strip() + filename = None + else: + module_name, filename = text.split('\t', 1) + self.api.request_reload_code(py_db, seq, module_name, filename) + + def cmd_change_variable(self, py_db, cmd_id, seq, text): + # the text is: thread\tstackframe\tFRAME|GLOBAL\tattribute_to_change\tvalue_to_change + thread_id, frame_id, scope, attr_and_value = text.split('\t', 3) + + tab_index = attr_and_value.rindex('\t') + attr = attr_and_value[0:tab_index].replace('\t', '.') + value = attr_and_value[tab_index + 1:] + self.api.request_change_variable(py_db, seq, thread_id, frame_id, scope, attr, value) + + def cmd_get_variable(self, py_db, cmd_id, seq, text): + # we received some command to get a variable + # the text is: thread_id\tframe_id\tFRAME|GLOBAL\tattributes* + thread_id, frame_id, scopeattrs = text.split('\t', 2) + + if scopeattrs.find('\t') != -1: # there are attributes beyond scope + scope, attrs = scopeattrs.split('\t', 1) + else: + scope, attrs = (scopeattrs, None) + + self.api.request_get_variable(py_db, seq, thread_id, frame_id, scope, attrs) + + def cmd_get_array(self, py_db, cmd_id, seq, text): + # Note: untested and unused in pydev + # we received some command to get an array variable + # the text is: thread_id\tframe_id\tFRAME|GLOBAL\tname\ttemp\troffs\tcoffs\trows\tcols\tformat + roffset, coffset, rows, cols, format, thread_id, frame_id, scopeattrs = text.split('\t', 7) + + if scopeattrs.find('\t') != -1: # there are attributes beyond scope + scope, attrs = scopeattrs.split('\t', 1) + else: + scope, attrs = (scopeattrs, None) + + self.api.request_get_array(py_db, seq, roffset, coffset, rows, cols, format, thread_id, frame_id, scope, attrs) + + def 
cmd_show_return_values(self, py_db, cmd_id, seq, text): + show_return_values = text.split('\t')[1] + self.api.set_show_return_values(py_db, int(show_return_values) == 1) + + def cmd_load_full_value(self, py_db, cmd_id, seq, text): + # Note: untested and unused in pydev + thread_id, frame_id, scopeattrs = text.split('\t', 2) + vars = scopeattrs.split(NEXT_VALUE_SEPARATOR) + + self.api.request_load_full_value(py_db, seq, thread_id, frame_id, vars) + + def cmd_get_description(self, py_db, cmd_id, seq, text): + # Note: untested and unused in pydev + thread_id, frame_id, expression = text.split('\t', 2) + self.api.request_get_description(py_db, seq, thread_id, frame_id, expression) + + def cmd_get_frame(self, py_db, cmd_id, seq, text): + thread_id, frame_id, scope = text.split('\t', 2) + self.api.request_get_frame(py_db, seq, thread_id, frame_id) + + def cmd_set_break(self, py_db, cmd_id, seq, text): + # func name: 'None': match anything. Empty: match global, specified: only method context. + # command to add some breakpoint. + # text is filename\tline. Add to breakpoints dictionary + suspend_policy = u"NONE" # Can be 'NONE' or 'ALL' + is_logpoint = False + hit_condition = None + if py_db._set_breakpoints_with_id: + try: + try: + breakpoint_id, btype, filename, line, func_name, condition, expression, hit_condition, is_logpoint, suspend_policy = text.split(u'\t', 9) + except ValueError: # not enough values to unpack + # No suspend_policy passed (use default). + breakpoint_id, btype, filename, line, func_name, condition, expression, hit_condition, is_logpoint = text.split(u'\t', 8) + is_logpoint = is_logpoint == u'True' + except ValueError: # not enough values to unpack + breakpoint_id, btype, filename, line, func_name, condition, expression = text.split(u'\t', 6) + + breakpoint_id = int(breakpoint_id) + line = int(line) + + # We must restore new lines and tabs as done in + # AbstractDebugTarget.breakpointAdded + condition = condition.replace(u"@_@NEW_LINE_CHAR@_@", u'\n').\ + replace(u"@_@TAB_CHAR@_@", u'\t').strip() + + expression = expression.replace(u"@_@NEW_LINE_CHAR@_@", u'\n').\ + replace(u"@_@TAB_CHAR@_@", u'\t').strip() + else: + # Note: this else should be removed after PyCharm migrates to setting + # breakpoints by id (and ideally also provides func_name). + btype, filename, line, func_name, suspend_policy, condition, expression = text.split(u'\t', 6) + # If we don't have an id given for each breakpoint, consider + # the id to be the line. + breakpoint_id = line = int(line) + + condition = condition.replace(u"@_@NEW_LINE_CHAR@_@", u'\n'). \ + replace(u"@_@TAB_CHAR@_@", u'\t').strip() + + expression = expression.replace(u"@_@NEW_LINE_CHAR@_@", u'\n'). 
\ + replace(u"@_@TAB_CHAR@_@", u'\t').strip() + + if condition is not None and (len(condition) <= 0 or condition == u"None"): + condition = None + + if expression is not None and (len(expression) <= 0 or expression == u"None"): + expression = None + + if hit_condition is not None and (len(hit_condition) <= 0 or hit_condition == u"None"): + hit_condition = None + + def on_changed_breakpoint_state(breakpoint_id, add_breakpoint_result): + error_code = add_breakpoint_result.error_code + + translated_line = add_breakpoint_result.translated_line + translated_filename = add_breakpoint_result.translated_filename + msg = '' + if error_code: + + if error_code == self.api.ADD_BREAKPOINT_FILE_NOT_FOUND: + msg = 'pydev debugger: Trying to add breakpoint to file that does not exist: %s (will have no effect).\n' % (translated_filename,) + + elif error_code == self.api.ADD_BREAKPOINT_FILE_EXCLUDED_BY_FILTERS: + msg = 'pydev debugger: Trying to add breakpoint to file that is excluded by filters: %s (will have no effect).\n' % (translated_filename,) + + elif error_code == self.api.ADD_BREAKPOINT_LAZY_VALIDATION: + msg = '' # Ignore this here (if/when loaded, it'll call on_changed_breakpoint_state again accordingly). + + elif error_code == self.api.ADD_BREAKPOINT_INVALID_LINE: + msg = 'pydev debugger: Trying to add breakpoint to line (%s) that is not valid in: %s.\n' % (translated_line, translated_filename,) + + else: + # Shouldn't get here. + msg = 'pydev debugger: Breakpoint not validated (reason unknown -- please report as error): %s (%s).\n' % (translated_filename, translated_line) + + else: + if add_breakpoint_result.original_line != translated_line: + msg = 'pydev debugger (info): Breakpoint in line: %s moved to line: %s (in %s).\n' % (add_breakpoint_result.original_line, translated_line, translated_filename) + + if msg: + py_db.writer.add_command(py_db.cmd_factory.make_warning_message(msg)) + + result = self.api.add_breakpoint( + py_db, self.api.filename_to_str(filename), btype, breakpoint_id, line, condition, func_name, + expression, suspend_policy, hit_condition, is_logpoint, on_changed_breakpoint_state=on_changed_breakpoint_state) + + on_changed_breakpoint_state(breakpoint_id, result) + + def cmd_remove_break(self, py_db, cmd_id, seq, text): + # command to remove some breakpoint + # text is type\file\tid. Remove from breakpoints dictionary + breakpoint_type, filename, breakpoint_id = text.split('\t', 2) + + filename = self.api.filename_to_str(filename) + + try: + breakpoint_id = int(breakpoint_id) + except ValueError: + pydev_log.critical('Error removing breakpoint. Expected breakpoint_id to be an int. 
Found: %s', breakpoint_id) + + else: + self.api.remove_breakpoint(py_db, filename, breakpoint_type, breakpoint_id) + + def _cmd_exec_or_evaluate_expression(self, py_db, cmd_id, seq, text): + # command to evaluate the given expression + # text is: thread\tstackframe\tLOCAL\texpression + attr_to_set_result = "" + try: + thread_id, frame_id, scope, expression, trim, attr_to_set_result = text.split('\t', 5) + except ValueError: + thread_id, frame_id, scope, expression, trim = text.split('\t', 4) + is_exec = cmd_id == CMD_EXEC_EXPRESSION + trim_if_too_big = int(trim) == 1 + + self.api.request_exec_or_evaluate( + py_db, seq, thread_id, frame_id, expression, is_exec, trim_if_too_big, attr_to_set_result) + + cmd_evaluate_expression = _cmd_exec_or_evaluate_expression + cmd_exec_expression = _cmd_exec_or_evaluate_expression + + def cmd_console_exec(self, py_db, cmd_id, seq, text): + # command to exec expression in console, in case expression is only partially valid 'False' is returned + # text is: thread\tstackframe\tLOCAL\texpression + + thread_id, frame_id, scope, expression = text.split('\t', 3) + self.api.request_console_exec(py_db, seq, thread_id, frame_id, expression) + + def cmd_set_path_mapping_json(self, py_db, cmd_id, seq, text): + ''' + :param text: + Json text. Something as: + + { + "pathMappings": [ + { + "localRoot": "c:/temp", + "remoteRoot": "/usr/temp" + } + ], + "debug": true, + "force": false + } + ''' + as_json = json.loads(text) + force = as_json.get('force', False) + + path_mappings = [] + for pathMapping in as_json.get('pathMappings', []): + localRoot = pathMapping.get('localRoot', '') + remoteRoot = pathMapping.get('remoteRoot', '') + if (localRoot != '') and (remoteRoot != ''): + path_mappings.append((localRoot, remoteRoot)) + + if bool(path_mappings) or force: + pydevd_file_utils.setup_client_server_paths(path_mappings) + + debug = as_json.get('debug', False) + if debug or force: + pydevd_file_utils.DEBUG_CLIENT_SERVER_TRANSLATION = debug + + def cmd_set_py_exception_json(self, py_db, cmd_id, seq, text): + # This API is optional and works 'in bulk' -- it's possible + # to get finer-grained control with CMD_ADD_EXCEPTION_BREAK/CMD_REMOVE_EXCEPTION_BREAK + # which allows setting caught/uncaught per exception, although global settings such as: + # - skip_on_exceptions_thrown_in_same_context + # - ignore_exceptions_thrown_in_lines_with_ignore_exception + # must still be set through this API (before anything else as this clears all existing + # exception breakpoints). 
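# Editor's illustrative sketch (not part of the diff): the bulk JSON accepted by
# cmd_set_py_exception_json carries the global exception-breakpoint flags plus the list of
# exception types to register. The concrete values below are an assumed example payload.
import json

example_payload = json.dumps({
    'break_on_uncaught': True,
    'break_on_caught': False,
    'break_on_user_caught': False,
    'skip_on_exceptions_thrown_in_same_context': False,
    'ignore_exceptions_thrown_in_lines_with_ignore_exception': True,
    'ignore_libraries': True,
    'exception_types': ['ValueError', 'django.core.exceptions.ObjectDoesNotExist'],
})
print(json.loads(example_payload)['exception_types'])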
+ try: + py_db.break_on_uncaught_exceptions = {} + py_db.break_on_caught_exceptions = {} + py_db.break_on_user_uncaught_exceptions = {} + + as_json = json.loads(text) + break_on_uncaught = as_json.get('break_on_uncaught', False) + break_on_caught = as_json.get('break_on_caught', False) + break_on_user_caught = as_json.get('break_on_user_caught', False) + py_db.skip_on_exceptions_thrown_in_same_context = as_json.get('skip_on_exceptions_thrown_in_same_context', False) + py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = as_json.get('ignore_exceptions_thrown_in_lines_with_ignore_exception', False) + ignore_libraries = as_json.get('ignore_libraries', False) + exception_types = as_json.get('exception_types', []) + + for exception_type in exception_types: + if not exception_type: + continue + + py_db.add_break_on_exception( + exception_type, + condition=None, + expression=None, + notify_on_handled_exceptions=break_on_caught, + notify_on_unhandled_exceptions=break_on_uncaught, + notify_on_user_unhandled_exceptions=break_on_user_caught, + notify_on_first_raise_only=True, + ignore_libraries=ignore_libraries, + ) + + py_db.on_breakpoints_changed() + except: + pydev_log.exception("Error when setting exception list. Received: %s", text) + + def cmd_set_py_exception(self, py_db, cmd_id, seq, text): + # DEPRECATED. Use cmd_set_py_exception_json instead. + try: + splitted = text.split(';') + py_db.break_on_uncaught_exceptions = {} + py_db.break_on_caught_exceptions = {} + py_db.break_on_user_uncaught_exceptions = {} + if len(splitted) >= 5: + if splitted[0] == 'true': + break_on_uncaught = True + else: + break_on_uncaught = False + + if splitted[1] == 'true': + break_on_caught = True + else: + break_on_caught = False + + if splitted[2] == 'true': + py_db.skip_on_exceptions_thrown_in_same_context = True + else: + py_db.skip_on_exceptions_thrown_in_same_context = False + + if splitted[3] == 'true': + py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = True + else: + py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = False + + if splitted[4] == 'true': + ignore_libraries = True + else: + ignore_libraries = False + + for exception_type in splitted[5:]: + exception_type = exception_type.strip() + if not exception_type: + continue + + py_db.add_break_on_exception( + exception_type, + condition=None, + expression=None, + notify_on_handled_exceptions=break_on_caught, + notify_on_unhandled_exceptions=break_on_uncaught, + notify_on_user_unhandled_exceptions=False, # TODO (not currently supported in this API). + notify_on_first_raise_only=True, + ignore_libraries=ignore_libraries, + ) + else: + pydev_log.exception("Expected to have at least 5 ';' separated items. Received: %s", text) + + except: + pydev_log.exception("Error when setting exception list. 
Received: %s", text) + + def _load_source(self, py_db, cmd_id, seq, text): + filename = text + filename = self.api.filename_to_str(filename) + self.api.request_load_source(py_db, seq, filename) + + cmd_load_source = _load_source + cmd_get_file_contents = _load_source + + def cmd_load_source_from_frame_id(self, py_db, cmd_id, seq, text): + frame_id = text + self.api.request_load_source_from_frame_id(py_db, seq, frame_id) + + def cmd_set_property_trace(self, py_db, cmd_id, seq, text): + # Command which receives whether to trace property getter/setter/deleter + # text is feature_state(true/false);disable_getter/disable_setter/disable_deleter + if text: + splitted = text.split(';') + if len(splitted) >= 3: + if not py_db.disable_property_trace and splitted[0] == 'true': + # Replacing property by custom property only when the debugger starts + pydevd_traceproperty.replace_builtin_property() + py_db.disable_property_trace = True + # Enable/Disable tracing of the property getter + if splitted[1] == 'true': + py_db.disable_property_getter_trace = True + else: + py_db.disable_property_getter_trace = False + # Enable/Disable tracing of the property setter + if splitted[2] == 'true': + py_db.disable_property_setter_trace = True + else: + py_db.disable_property_setter_trace = False + # Enable/Disable tracing of the property deleter + if splitted[3] == 'true': + py_db.disable_property_deleter_trace = True + else: + py_db.disable_property_deleter_trace = False + + def cmd_add_exception_break(self, py_db, cmd_id, seq, text): + # Note that this message has some idiosyncrasies... + # + # notify_on_handled_exceptions can be 0, 1 or 2 + # 0 means we should not stop on handled exceptions. + # 1 means we should stop on handled exceptions showing it on all frames where the exception passes. + # 2 means we should stop on handled exceptions but we should only notify about it once. + # + # To ignore_libraries properly, besides setting ignore_libraries to 1, the IDE_PROJECT_ROOTS environment + # variable must be set (so, we'll ignore anything not below IDE_PROJECT_ROOTS) -- this is not ideal as + # the environment variable may not be properly set if it didn't start from the debugger (we should + # create a custom message for that). + # + # There are 2 global settings which can only be set in CMD_SET_PY_EXCEPTION. Namely: + # + # py_db.skip_on_exceptions_thrown_in_same_context + # - If True, we should only show the exception in a caller, not where it was first raised. + # + # py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception + # - If True exceptions thrown in lines with '@IgnoreException' will not be shown. 
+ + condition = "" + expression = "" + if text.find('\t') != -1: + try: + exception, condition, expression, notify_on_handled_exceptions, notify_on_unhandled_exceptions, ignore_libraries = text.split('\t', 5) + except: + exception, notify_on_handled_exceptions, notify_on_unhandled_exceptions, ignore_libraries = text.split('\t', 3) + else: + exception, notify_on_handled_exceptions, notify_on_unhandled_exceptions, ignore_libraries = text, 0, 0, 0 + + condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n').replace("@_@TAB_CHAR@_@", '\t').strip() + + if condition is not None and (len(condition) == 0 or condition == "None"): + condition = None + + expression = expression.replace("@_@NEW_LINE_CHAR@_@", '\n').replace("@_@TAB_CHAR@_@", '\t').strip() + + if expression is not None and (len(expression) == 0 or expression == "None"): + expression = None + + if exception.find('-') != -1: + breakpoint_type, exception = exception.split('-') + else: + breakpoint_type = 'python' + + if breakpoint_type == 'python': + self.api.add_python_exception_breakpoint( + py_db, exception, condition, expression, + notify_on_handled_exceptions=int(notify_on_handled_exceptions) > 0, + notify_on_unhandled_exceptions=int(notify_on_unhandled_exceptions) == 1, + notify_on_user_unhandled_exceptions=0, # TODO (not currently supported in this API). + notify_on_first_raise_only=int(notify_on_handled_exceptions) == 2, + ignore_libraries=int(ignore_libraries) > 0, + ) + else: + self.api.add_plugins_exception_breakpoint(py_db, breakpoint_type, exception) + + def cmd_remove_exception_break(self, py_db, cmd_id, seq, text): + exception = text + if exception.find('-') != -1: + exception_type, exception = exception.split('-') + else: + exception_type = 'python' + + if exception_type == 'python': + self.api.remove_python_exception_breakpoint(py_db, exception) + else: + self.api.remove_plugins_exception_breakpoint(py_db, exception_type, exception) + + def cmd_add_django_exception_break(self, py_db, cmd_id, seq, text): + self.api.add_plugins_exception_breakpoint(py_db, breakpoint_type='django', exception=text) + + def cmd_remove_django_exception_break(self, py_db, cmd_id, seq, text): + self.api.remove_plugins_exception_breakpoint(py_db, exception_type='django', exception=text) + + def cmd_evaluate_console_expression(self, py_db, cmd_id, seq, text): + # Command which takes care for the debug console communication + if text != "": + thread_id, frame_id, console_command = text.split('\t', 2) + console_command, line = console_command.split('\t') + + if console_command == 'EVALUATE': + int_cmd = InternalEvaluateConsoleExpression( + seq, thread_id, frame_id, line, buffer_output=True) + + elif console_command == 'EVALUATE_UNBUFFERED': + int_cmd = InternalEvaluateConsoleExpression( + seq, thread_id, frame_id, line, buffer_output=False) + + elif console_command == 'GET_COMPLETIONS': + int_cmd = InternalConsoleGetCompletions(seq, thread_id, frame_id, line) + + else: + raise ValueError('Unrecognized command: %s' % (console_command,)) + + py_db.post_internal_command(int_cmd, thread_id) + + def cmd_run_custom_operation(self, py_db, cmd_id, seq, text): + # Command which runs a custom operation + if text != "": + try: + location, custom = text.split('||', 1) + except: + sys.stderr.write('Custom operation now needs a || separator. 
Found: %s\n' % (text,)) + raise + + thread_id, frame_id, scopeattrs = location.split('\t', 2) + + if scopeattrs.find('\t') != -1: # there are attributes beyond scope + scope, attrs = scopeattrs.split('\t', 1) + else: + scope, attrs = (scopeattrs, None) + + # : style: EXECFILE or EXEC + # : encoded_code_or_file: file to execute or code + # : fname: name of function to be executed in the resulting namespace + style, encoded_code_or_file, fnname = custom.split('\t', 3) + int_cmd = InternalRunCustomOperation(seq, thread_id, frame_id, scope, attrs, + style, encoded_code_or_file, fnname) + py_db.post_internal_command(int_cmd, thread_id) + + def cmd_ignore_thrown_exception_at(self, py_db, cmd_id, seq, text): + if text: + replace = 'REPLACE:' # Not all 3.x versions support u'REPLACE:', so, doing workaround. + if text.startswith(replace): + text = text[8:] + py_db.filename_to_lines_where_exceptions_are_ignored.clear() + + if text: + for line in text.split('||'): # Can be bulk-created (one in each line) + original_filename, line_number = line.split('|') + original_filename = self.api.filename_to_server(original_filename) + + canonical_normalized_filename = pydevd_file_utils.canonical_normalized_path(original_filename) + absolute_filename = pydevd_file_utils.absolute_path(original_filename) + + if os.path.exists(absolute_filename): + lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename) + if lines_ignored is None: + lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {} + lines_ignored[int(line_number)] = 1 + else: + sys.stderr.write('pydev debugger: warning: trying to ignore exception thrown'\ + ' on file that does not exist: %s (will have no effect)\n' % (absolute_filename,)) + + def cmd_enable_dont_trace(self, py_db, cmd_id, seq, text): + if text: + true_str = 'true' # Not all 3.x versions support u'str', so, doing workaround. 
+ mode = text.strip() == true_str + pydevd_dont_trace.trace_filter(mode) + + def cmd_redirect_output(self, py_db, cmd_id, seq, text): + if text: + py_db.enable_output_redirection('STDOUT' in text, 'STDERR' in text) + + def cmd_get_next_statement_targets(self, py_db, cmd_id, seq, text): + thread_id, frame_id = text.split('\t', 1) + + py_db.post_method_as_internal_command( + thread_id, internal_get_next_statement_targets, seq, thread_id, frame_id) + + def cmd_get_smart_step_into_variants(self, py_db, cmd_id, seq, text): + thread_id, frame_id, start_line, end_line = text.split('\t', 3) + + py_db.post_method_as_internal_command( + thread_id, internal_get_smart_step_into_variants, seq, thread_id, frame_id, start_line, end_line, set_additional_thread_info=set_additional_thread_info) + + def cmd_set_project_roots(self, py_db, cmd_id, seq, text): + self.api.set_project_roots(py_db, text.split(u'\t')) + + def cmd_thread_dump_to_stderr(self, py_db, cmd_id, seq, text): + pydevd_utils.dump_threads() + + def cmd_stop_on_start(self, py_db, cmd_id, seq, text): + if text.strip() in ('True', 'true', '1'): + self.api.stop_on_entry() + + def cmd_pydevd_json_config(self, py_db, cmd_id, seq, text): + # Expected to receive a json string as: + # { + # 'skip_suspend_on_breakpoint_exception': [], + # 'skip_print_breakpoint_exception': [], + # 'multi_threads_single_notification': bool, + # } + msg = json.loads(text.strip()) + if 'skip_suspend_on_breakpoint_exception' in msg: + py_db.skip_suspend_on_breakpoint_exception = tuple( + get_exception_class(x) for x in msg['skip_suspend_on_breakpoint_exception']) + + if 'skip_print_breakpoint_exception' in msg: + py_db.skip_print_breakpoint_exception = tuple( + get_exception_class(x) for x in msg['skip_print_breakpoint_exception']) + + if 'multi_threads_single_notification' in msg: + py_db.multi_threads_single_notification = msg['multi_threads_single_notification'] + + def cmd_get_exception_details(self, py_db, cmd_id, seq, text): + thread_id = text + t = pydevd_find_thread_by_id(thread_id) + frame = None + if t and not getattr(t, 'pydev_do_not_trace', None): + additional_info = set_additional_thread_info(t) + frame = additional_info.get_topmost_frame(t) + try: + return py_db.cmd_factory.make_get_exception_details_message(py_db, seq, thread_id, frame) + finally: + frame = None + t = None + + +process_net_command = _PyDevCommandProcessor().process_net_command + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command_json.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command_json.py new file mode 100644 index 0000000..ff01ecd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command_json.py @@ -0,0 +1,1311 @@ +import itertools +import json +import linecache +import os +import platform +import sys +from functools import partial + +import pydevd_file_utils +from _pydev_bundle import pydev_log +from _pydevd_bundle._debug_adapter import pydevd_base_schema, pydevd_schema +from _pydevd_bundle._debug_adapter.pydevd_schema import ( + CompletionsResponseBody, EvaluateResponseBody, ExceptionOptions, + GotoTargetsResponseBody, ModulesResponseBody, ProcessEventBody, + ProcessEvent, Scope, ScopesResponseBody, SetExpressionResponseBody, + SetVariableResponseBody, SourceBreakpoint, SourceResponseBody, + VariablesResponseBody, SetBreakpointsResponseBody, Response, + Capabilities, PydevdAuthorizeRequest, Request, + 
StepInTargetsResponseBody, SetFunctionBreakpointsResponseBody, BreakpointEvent, + BreakpointEventBody) +from _pydevd_bundle.pydevd_api import PyDevdAPI +from _pydevd_bundle.pydevd_breakpoints import get_exception_class, FunctionBreakpoint +from _pydevd_bundle.pydevd_comm_constants import ( + CMD_PROCESS_EVENT, CMD_RETURN, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO, + CMD_STEP_INTO_MY_CODE, CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE, file_system_encoding, + CMD_STEP_RETURN_MY_CODE, CMD_STEP_RETURN) +from _pydevd_bundle.pydevd_filtering import ExcludeFilter +from _pydevd_bundle.pydevd_json_debug_options import _extract_debug_options, DebugOptions +from _pydevd_bundle.pydevd_net_command import NetCommand +from _pydevd_bundle.pydevd_utils import convert_dap_log_message_to_expression, ScopeRequest +from _pydevd_bundle.pydevd_constants import (PY_IMPL_NAME, DebugInfoHolder, PY_VERSION_STR, + PY_IMPL_VERSION_STR, IS_64BIT_PROCESS) +from _pydevd_bundle.pydevd_trace_dispatch import USING_CYTHON +from _pydevd_frame_eval.pydevd_frame_eval_main import USING_FRAME_EVAL +from _pydevd_bundle.pydevd_comm import internal_get_step_in_targets_json +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info +from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id + + +def _convert_rules_to_exclude_filters(rules, on_error): + exclude_filters = [] + if not isinstance(rules, list): + on_error('Invalid "rules" (expected list of dicts). Found: %s' % (rules,)) + + else: + directory_exclude_filters = [] + module_exclude_filters = [] + glob_exclude_filters = [] + + for rule in rules: + if not isinstance(rule, dict): + on_error('Invalid "rules" (expected list of dicts). Found: %s' % (rules,)) + continue + + include = rule.get('include') + if include is None: + on_error('Invalid "rule" (expected dict with "include"). Found: %s' % (rule,)) + continue + + path = rule.get('path') + module = rule.get('module') + if path is None and module is None: + on_error('Invalid "rule" (expected dict with "path" or "module"). Found: %s' % (rule,)) + continue + + if path is not None: + glob_pattern = path + if '*' not in path and '?' not in path: + if os.path.isdir(glob_pattern): + # If a directory was specified, add a '/**' + # to be consistent with the glob pattern required + # by pydevd. + if not glob_pattern.endswith('/') and not glob_pattern.endswith('\\'): + glob_pattern += '/' + glob_pattern += '**' + directory_exclude_filters.append(ExcludeFilter(glob_pattern, not include, True)) + else: + glob_exclude_filters.append(ExcludeFilter(glob_pattern, not include, True)) + + elif module is not None: + module_exclude_filters.append(ExcludeFilter(module, not include, False)) + + else: + on_error('Internal error: expected path or module to be specified.') + + # Note that we have to sort the directory/module exclude filters so that the biggest + # paths match first. + # i.e.: if we have: + # /sub1/sub2/sub3 + # a rule with /sub1/sub2 would match before a rule only with /sub1. 
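# Editor's illustrative sketch (not part of the diff): "rules" is a list of dicts, each with
# an "include" flag and either a "path" or a "module"; a directory path gets a trailing
# '/**' glob, and directory/module filters are sorted longest-first so the most specific
# rule wins. The rules below are an assumed example of what a client might send.
example_rules = [
    {'path': '/work/project', 'include': True},                 # debug the project itself
    {'path': '/work/project/generated/**', 'include': False},   # ...but skip generated code
    {'module': 'django', 'include': False},                     # and skip the django module
]
print([('module' in rule, rule.get('include')) for rule in example_rules])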
+ directory_exclude_filters = sorted(directory_exclude_filters, key=lambda exclude_filter:-len(exclude_filter.name)) + module_exclude_filters = sorted(module_exclude_filters, key=lambda exclude_filter:-len(exclude_filter.name)) + exclude_filters = directory_exclude_filters + glob_exclude_filters + module_exclude_filters + + return exclude_filters + + +class IDMap(object): + + def __init__(self): + self._value_to_key = {} + self._key_to_value = {} + self._next_id = partial(next, itertools.count(0)) + + def obtain_value(self, key): + return self._key_to_value[key] + + def obtain_key(self, value): + try: + key = self._value_to_key[value] + except KeyError: + key = self._next_id() + self._key_to_value[key] = value + self._value_to_key[value] = key + return key + + +class PyDevJsonCommandProcessor(object): + + def __init__(self, from_json): + self.from_json = from_json + self.api = PyDevdAPI() + self._options = DebugOptions() + self._next_breakpoint_id = partial(next, itertools.count(0)) + self._goto_targets_map = IDMap() + self._launch_or_attach_request_done = False + + def process_net_command_json(self, py_db, json_contents, send_response=True): + ''' + Processes a debug adapter protocol json command. + ''' + + DEBUG = False + + try: + if isinstance(json_contents, bytes): + json_contents = json_contents.decode('utf-8') + + request = self.from_json(json_contents, update_ids_from_dap=True) + except Exception as e: + try: + loaded_json = json.loads(json_contents) + request = Request(loaded_json.get('command', ''), loaded_json['seq']) + except: + # There's not much we can do in this case... + pydev_log.exception('Error loading json: %s', json_contents) + return + + error_msg = str(e) + if error_msg.startswith("'") and error_msg.endswith("'"): + error_msg = error_msg[1:-1] + + # This means a failure processing the request (but we were able to load the seq, + # so, answer with a failure response). 
+ def on_request(py_db, request): + error_response = { + 'type': 'response', + 'request_seq': request.seq, + 'success': False, + 'command': request.command, + 'message': error_msg, + } + return NetCommand(CMD_RETURN, 0, error_response, is_json=True) + + else: + if DebugInfoHolder.DEBUG_RECORD_SOCKET_READS and DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + pydev_log.info('Process %s: %s\n' % ( + request.__class__.__name__, json.dumps(request.to_dict(update_ids_to_dap=True), indent=4, sort_keys=True),)) + + assert request.type == 'request' + method_name = 'on_%s_request' % (request.command.lower(),) + on_request = getattr(self, method_name, None) + if on_request is None: + print('Unhandled: %s not available in PyDevJsonCommandProcessor.\n' % (method_name,)) + return + + if DEBUG: + print('Handled in pydevd: %s (in PyDevJsonCommandProcessor).\n' % (method_name,)) + + with py_db._main_lock: + if request.__class__ == PydevdAuthorizeRequest: + authorize_request = request # : :type authorize_request: PydevdAuthorizeRequest + access_token = authorize_request.arguments.debugServerAccessToken + py_db.authentication.login(access_token) + + if not py_db.authentication.is_authenticated(): + response = Response( + request.seq, success=False, command=request.command, message='Client not authenticated.', body={}) + cmd = NetCommand(CMD_RETURN, 0, response, is_json=True) + py_db.writer.add_command(cmd) + return + + cmd = on_request(py_db, request) + if cmd is not None and send_response: + py_db.writer.add_command(cmd) + + def on_pydevdauthorize_request(self, py_db, request): + client_access_token = py_db.authentication.client_access_token + body = {'clientAccessToken': None} + if client_access_token: + body['clientAccessToken'] = client_access_token + + response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_initialize_request(self, py_db, request): + body = Capabilities( + # Supported. + supportsConfigurationDoneRequest=True, + supportsConditionalBreakpoints=True, + supportsHitConditionalBreakpoints=True, + supportsEvaluateForHovers=True, + supportsSetVariable=True, + supportsGotoTargetsRequest=True, + supportsCompletionsRequest=True, + supportsModulesRequest=True, + supportsExceptionOptions=True, + supportsValueFormattingOptions=True, + supportsExceptionInfoRequest=True, + supportTerminateDebuggee=True, + supportsDelayedStackTraceLoading=True, + supportsLogPoints=True, + supportsSetExpression=True, + supportsTerminateRequest=True, + supportsClipboardContext=True, + supportsFunctionBreakpoints=True, + + exceptionBreakpointFilters=[ + {'filter': 'raised', 'label': 'Raised Exceptions', 'default': False}, + {'filter': 'uncaught', 'label': 'Uncaught Exceptions', 'default': True}, + {"filter": "userUnhandled", "label": "User Uncaught Exceptions", "default": False}, + ], + + # Not supported. + supportsStepBack=False, + supportsRestartFrame=False, + supportsStepInTargetsRequest=True, + supportsRestartRequest=False, + supportsLoadedSourcesRequest=False, + supportsTerminateThreadsRequest=False, + supportsDataBreakpoints=False, + supportsReadMemoryRequest=False, + supportsDisassembleRequest=False, + additionalModuleColumns=[], + completionTriggerCharacters=[], + supportedChecksumAlgorithms=[], + ).to_dict() + + # Non-standard capabilities/info below. 
+ body['supportsDebuggerProperties'] = True + + body['pydevd'] = pydevd_info = {} + pydevd_info['processId'] = os.getpid() + self.api.notify_initialize(py_db) + response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_configurationdone_request(self, py_db, request): + ''' + :param ConfigurationDoneRequest request: + ''' + if not self._launch_or_attach_request_done: + pydev_log.critical('Missing launch request or attach request before configuration done request.') + + self.api.run(py_db) + self.api.notify_configuration_done(py_db) + + configuration_done_response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, configuration_done_response, is_json=True) + + def on_threads_request(self, py_db, request): + ''' + :param ThreadsRequest request: + ''' + return self.api.list_threads(py_db, request.seq) + + def on_terminate_request(self, py_db, request): + ''' + :param TerminateRequest request: + ''' + self._request_terminate_process(py_db) + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def _request_terminate_process(self, py_db): + self.api.request_terminate_process(py_db) + + def on_completions_request(self, py_db, request): + ''' + :param CompletionsRequest request: + ''' + arguments = request.arguments # : :type arguments: CompletionsArguments + seq = request.seq + text = arguments.text + frame_id = arguments.frameId + thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference( + frame_id) + + if thread_id is None: + body = CompletionsResponseBody([]) + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': 'Thread to get completions seems to have resumed already.' + }) + return NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + + # Note: line and column are 1-based (convert to 0-based for pydevd). + column = arguments.column - 1 + + if arguments.line is None: + # line is optional + line = -1 + else: + line = arguments.line - 1 + + self.api.request_completions(py_db, seq, thread_id, frame_id, text, line=line, column=column) + + def _resolve_remote_root(self, local_root, remote_root): + if remote_root == '.': + cwd = os.getcwd() + append_pathsep = local_root.endswith('\\') or local_root.endswith('/') + return cwd + (os.path.sep if append_pathsep else '') + return remote_root + + def _set_debug_options(self, py_db, args, start_reason): + rules = args.get('rules') + stepping_resumes_all_threads = args.get('steppingResumesAllThreads', True) + self.api.set_stepping_resumes_all_threads(py_db, stepping_resumes_all_threads) + + terminate_child_processes = args.get('terminateChildProcesses', True) + self.api.set_terminate_child_processes(py_db, terminate_child_processes) + + variable_presentation = args.get('variablePresentation', None) + if isinstance(variable_presentation, dict): + + def get_variable_presentation(setting, default): + value = variable_presentation.get(setting, default) + if value not in ('group', 'inline', 'hide'): + pydev_log.info( + 'The value set for "%s" (%s) in the variablePresentation is not valid. 
Valid values are: "group", "inline", "hide"' % ( + setting, value,)) + value = default + + return value + + default = get_variable_presentation('all', 'group') + + special_presentation = get_variable_presentation('special', default) + function_presentation = get_variable_presentation('function', default) + class_presentation = get_variable_presentation('class', default) + protected_presentation = get_variable_presentation('protected', default) + + self.api.set_variable_presentation(py_db, self.api.VariablePresentation( + special_presentation, + function_presentation, + class_presentation, + protected_presentation + )) + + exclude_filters = [] + + if rules is not None: + exclude_filters = _convert_rules_to_exclude_filters( + rules, lambda msg: self.api.send_error_message(py_db, msg)) + + self.api.set_exclude_filters(py_db, exclude_filters) + + debug_options = _extract_debug_options( + args.get('options'), + args.get('debugOptions'), + ) + self._options.update_fom_debug_options(debug_options) + self._options.update_from_args(args) + + self.api.set_use_libraries_filter(py_db, self._options.just_my_code) + + path_mappings = [] + for pathMapping in args.get('pathMappings', []): + localRoot = pathMapping.get('localRoot', '') + remoteRoot = pathMapping.get('remoteRoot', '') + remoteRoot = self._resolve_remote_root(localRoot, remoteRoot) + if (localRoot != '') and (remoteRoot != ''): + path_mappings.append((localRoot, remoteRoot)) + + if bool(path_mappings): + pydevd_file_utils.setup_client_server_paths(path_mappings) + + resolve_symlinks = args.get('resolveSymlinks', None) + if resolve_symlinks is not None: + pydevd_file_utils.set_resolve_symlinks(resolve_symlinks) + + redirecting = args.get("isOutputRedirected") + if self._options.redirect_output: + py_db.enable_output_redirection(True, True) + redirecting = True + else: + py_db.enable_output_redirection(False, False) + + py_db.is_output_redirected = redirecting + + self.api.set_show_return_values(py_db, self._options.show_return_value) + + if not self._options.break_system_exit_zero: + ignore_system_exit_codes = [0, None] + if self._options.django_debug or self._options.flask_debug: + ignore_system_exit_codes += [3] + + self.api.set_ignore_system_exit_codes(py_db, ignore_system_exit_codes) + + auto_reload = args.get('autoReload', {}) + if not isinstance(auto_reload, dict): + pydev_log.info('Expected autoReload to be a dict. Received: %s' % (auto_reload,)) + auto_reload = {} + + enable_auto_reload = auto_reload.get('enable', False) + watch_dirs = auto_reload.get('watchDirectories') + if not watch_dirs: + watch_dirs = [] + # Note: by default this is no longer done because on some cases there are entries in the PYTHONPATH + # such as the home directory or /python/x64, where the site packages are in /python/x64/libs, so, + # we only watch the current working directory as well as executed script. + # check = getattr(sys, 'path', [])[:] + # # By default only watch directories that are in the project roots / + # # program dir (if available), sys.argv[0], as well as the current dir (we don't want to + # # listen to the whole site-packages by default as it can be huge). 
+ # watch_dirs = [pydevd_file_utils.absolute_path(w) for w in check] + # watch_dirs = [w for w in watch_dirs if py_db.in_project_roots_filename_uncached(w) and os.path.isdir(w)] + + program = args.get('program') + if program: + if os.path.isdir(program): + watch_dirs.append(program) + else: + watch_dirs.append(os.path.dirname(program)) + watch_dirs.append(os.path.abspath('.')) + + argv = getattr(sys, 'argv', []) + if argv: + f = argv[0] + if f: # argv[0] could be None (https://github.com/microsoft/debugpy/issues/987) + if os.path.isdir(f): + watch_dirs.append(f) + else: + watch_dirs.append(os.path.dirname(f)) + + if not isinstance(watch_dirs, (list, set, tuple)): + watch_dirs = (watch_dirs,) + new_watch_dirs = set() + for w in watch_dirs: + try: + new_watch_dirs.add(pydevd_file_utils.get_path_with_real_case(pydevd_file_utils.absolute_path(w))) + except Exception: + pydev_log.exception('Error adding watch dir: %s', w) + watch_dirs = new_watch_dirs + + poll_target_time = auto_reload.get('pollingInterval', 1) + exclude_patterns = auto_reload.get('exclude', ('**/.git/**', '**/__pycache__/**', '**/node_modules/**', '**/.metadata/**', '**/site-packages/**')) + include_patterns = auto_reload.get('include', ('**/*.py', '**/*.pyw')) + self.api.setup_auto_reload_watcher( + py_db, enable_auto_reload, watch_dirs, poll_target_time, exclude_patterns, include_patterns) + + if self._options.stop_on_entry and start_reason == 'launch': + self.api.stop_on_entry() + + self.api.set_gui_event_loop(py_db, self._options.gui_event_loop) + + def _send_process_event(self, py_db, start_method): + argv = getattr(sys, 'argv', []) + if len(argv) > 0: + name = argv[0] + else: + name = '' + + if isinstance(name, bytes): + name = name.decode(file_system_encoding, 'replace') + name = name.encode('utf-8') + + body = ProcessEventBody( + name=name, + systemProcessId=os.getpid(), + isLocalProcess=True, + startMethod=start_method, + ) + event = ProcessEvent(body) + py_db.writer.add_command(NetCommand(CMD_PROCESS_EVENT, 0, event, is_json=True)) + + def _handle_launch_or_attach_request(self, py_db, request, start_reason): + self._send_process_event(py_db, start_reason) + self._launch_or_attach_request_done = True + self.api.set_enable_thread_notifications(py_db, True) + self._set_debug_options(py_db, request.arguments.kwargs, start_reason=start_reason) + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_launch_request(self, py_db, request): + ''' + :param LaunchRequest request: + ''' + return self._handle_launch_or_attach_request(py_db, request, start_reason='launch') + + def on_attach_request(self, py_db, request): + ''' + :param AttachRequest request: + ''' + return self._handle_launch_or_attach_request(py_db, request, start_reason='attach') + + def on_pause_request(self, py_db, request): + ''' + :param PauseRequest request: + ''' + arguments = request.arguments # : :type arguments: PauseArguments + thread_id = arguments.threadId + + self.api.request_suspend_thread(py_db, thread_id=thread_id) + + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_continue_request(self, py_db, request): + ''' + :param ContinueRequest request: + ''' + arguments = request.arguments # : :type arguments: ContinueArguments + thread_id = arguments.threadId + + def on_resumed(): + body = {'allThreadsContinued': thread_id == '*'} + response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + 
cmd = NetCommand(CMD_RETURN, 0, response, is_json=True) + py_db.writer.add_command(cmd) + + # Only send resumed notification when it has actually resumed! + # (otherwise the user could send a continue, receive the notification and then + # request a new pause which would be paused without sending any notification as + # it didn't really run in the first place). + py_db.threads_suspended_single_notification.add_on_resumed_callback(on_resumed) + self.api.request_resume_thread(thread_id) + + def on_next_request(self, py_db, request): + ''' + :param NextRequest request: + ''' + arguments = request.arguments # : :type arguments: NextArguments + thread_id = arguments.threadId + + if py_db.get_use_libraries_filter(): + step_cmd_id = CMD_STEP_OVER_MY_CODE + else: + step_cmd_id = CMD_STEP_OVER + + self.api.request_step(py_db, thread_id, step_cmd_id) + + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_stepin_request(self, py_db, request): + ''' + :param StepInRequest request: + ''' + arguments = request.arguments # : :type arguments: StepInArguments + thread_id = arguments.threadId + + target_id = arguments.targetId + if target_id is not None: + thread = pydevd_find_thread_by_id(thread_id) + info = set_additional_thread_info(thread) + target_id_to_smart_step_into_variant = info.target_id_to_smart_step_into_variant + if not target_id_to_smart_step_into_variant: + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'success': False, + 'message': 'Unable to step into target (no targets are saved in the thread info).' + }) + return NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + + variant = target_id_to_smart_step_into_variant.get(target_id) + if variant is not None: + parent = variant.parent + if parent is not None: + self.api.request_smart_step_into(py_db, request.seq, thread_id, parent.offset, variant.offset) + else: + self.api.request_smart_step_into(py_db, request.seq, thread_id, variant.offset, -1) + else: + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'success': False, + 'message': 'Unable to find step into target %s. Available targets: %s' % ( + target_id, target_id_to_smart_step_into_variant) + }) + return NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + + else: + if py_db.get_use_libraries_filter(): + step_cmd_id = CMD_STEP_INTO_MY_CODE + else: + step_cmd_id = CMD_STEP_INTO + + self.api.request_step(py_db, thread_id, step_cmd_id) + + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_stepintargets_request(self, py_db, request): + ''' + :param StepInTargetsRequest request: + ''' + frame_id = request.arguments.frameId + thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference( + frame_id) + + if thread_id is None: + body = StepInTargetsResponseBody([]) + variables_response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': 'Unable to get thread_id from frame_id (thread to get step in targets seems to have resumed already).' 
+ }) + return NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + + py_db.post_method_as_internal_command( + thread_id, internal_get_step_in_targets_json, request.seq, thread_id, frame_id, request, set_additional_thread_info) + + def on_stepout_request(self, py_db, request): + ''' + :param StepOutRequest request: + ''' + arguments = request.arguments # : :type arguments: StepOutArguments + thread_id = arguments.threadId + + if py_db.get_use_libraries_filter(): + step_cmd_id = CMD_STEP_RETURN_MY_CODE + else: + step_cmd_id = CMD_STEP_RETURN + + self.api.request_step(py_db, thread_id, step_cmd_id) + + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def _get_hit_condition_expression(self, hit_condition): + '''Following hit condition values are supported + + * x or == x when breakpoint is hit x times + * >= x when breakpoint is hit more than or equal to x times + * % x when breakpoint is hit multiple of x times + + Returns '@HIT@ == x' where @HIT@ will be replaced by number of hits + ''' + if not hit_condition: + return None + + expr = hit_condition.strip() + try: + int(expr) + return '@HIT@ == {}'.format(expr) + except ValueError: + pass + + if expr.startswith('%'): + return '@HIT@ {} == 0'.format(expr) + + if expr.startswith('==') or \ + expr.startswith('>') or \ + expr.startswith('<'): + return '@HIT@ {}'.format(expr) + + return hit_condition + + def on_disconnect_request(self, py_db, request): + ''' + :param DisconnectRequest request: + ''' + if request.arguments.terminateDebuggee: + self._request_terminate_process(py_db) + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + self._launch_or_attach_request_done = False + py_db.enable_output_redirection(False, False) + self.api.request_disconnect(py_db, resume_threads=True) + + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def _verify_launch_or_attach_done(self, request): + if not self._launch_or_attach_request_done: + # Note that to validate the breakpoints we need the launch request to be done already + # (otherwise the filters wouldn't be set for the breakpoint validation). + if request.command == 'setFunctionBreakpoints': + body = SetFunctionBreakpointsResponseBody([]) + else: + body = SetBreakpointsResponseBody([]) + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': 'Breakpoints may only be set after the launch request is received.' + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_setfunctionbreakpoints_request(self, py_db, request): + ''' + :param SetFunctionBreakpointsRequest request: + ''' + response = self._verify_launch_or_attach_done(request) + if response is not None: + return response + + arguments = request.arguments # : :type arguments: SetFunctionBreakpointsArguments + function_breakpoints = [] + suspend_policy = 'ALL' + + # Not currently covered by the DAP. + is_logpoint = False + expression = None + + breakpoints_set = [] + for bp in arguments.breakpoints: + hit_condition = self._get_hit_condition_expression(bp.get('hitCondition')) + condition = bp.get('condition') + + function_breakpoints.append( + FunctionBreakpoint(bp['name'], condition, expression, suspend_policy, hit_condition, is_logpoint)) + + # Note: always succeeds. 
+ breakpoints_set.append(pydevd_schema.Breakpoint( + verified=True, id=self._next_breakpoint_id()).to_dict()) + + self.api.set_function_breakpoints(py_db, function_breakpoints) + + body = {'breakpoints': breakpoints_set} + set_breakpoints_response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, set_breakpoints_response, is_json=True) + + def on_setbreakpoints_request(self, py_db, request): + ''' + :param SetBreakpointsRequest request: + ''' + response = self._verify_launch_or_attach_done(request) + if response is not None: + return response + + arguments = request.arguments # : :type arguments: SetBreakpointsArguments + # TODO: Path is optional here it could be source reference. + filename = self.api.filename_to_str(arguments.source.path) + func_name = 'None' + + self.api.remove_all_breakpoints(py_db, filename) + + btype = 'python-line' + suspend_policy = 'ALL' + + if not filename.lower().endswith('.py'): # Note: check based on original file, not mapping. + if self._options.django_debug: + btype = 'django-line' + elif self._options.flask_debug: + btype = 'jinja2-line' + + breakpoints_set = [] + + for source_breakpoint in arguments.breakpoints: + source_breakpoint = SourceBreakpoint(**source_breakpoint) + line = source_breakpoint.line + condition = source_breakpoint.condition + breakpoint_id = self._next_breakpoint_id() + + hit_condition = self._get_hit_condition_expression(source_breakpoint.hitCondition) + log_message = source_breakpoint.logMessage + if not log_message: + is_logpoint = None + expression = None + else: + is_logpoint = True + expression = convert_dap_log_message_to_expression(log_message) + + on_changed_breakpoint_state = partial(self._on_changed_breakpoint_state, py_db, arguments.source) + result = self.api.add_breakpoint( + py_db, filename, btype, breakpoint_id, line, condition, func_name, expression, + suspend_policy, hit_condition, is_logpoint, adjust_line=True, on_changed_breakpoint_state=on_changed_breakpoint_state) + + bp = self._create_breakpoint_from_add_breakpoint_result(py_db, arguments.source, breakpoint_id, result) + breakpoints_set.append(bp) + + body = {'breakpoints': breakpoints_set} + set_breakpoints_response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, set_breakpoints_response, is_json=True) + + def _on_changed_breakpoint_state(self, py_db, source, breakpoint_id, result): + bp = self._create_breakpoint_from_add_breakpoint_result(py_db, source, breakpoint_id, result) + body = BreakpointEventBody( + reason='changed', + breakpoint=bp, + ) + event = BreakpointEvent(body) + event_id = 0 # Actually ignored in this case + py_db.writer.add_command(NetCommand(event_id, 0, event, is_json=True)) + + def _create_breakpoint_from_add_breakpoint_result(self, py_db, source, breakpoint_id, result): + error_code = result.error_code + + if error_code: + if error_code == self.api.ADD_BREAKPOINT_FILE_NOT_FOUND: + error_msg = 'Breakpoint in file that does not exist.' + + elif error_code == self.api.ADD_BREAKPOINT_FILE_EXCLUDED_BY_FILTERS: + error_msg = 'Breakpoint in file excluded by filters.' + if py_db.get_use_libraries_filter(): + error_msg += ('\nNote: may be excluded because of "justMyCode" option (default == true).' + 'Try setting \"justMyCode\": false in the debug configuration (e.g., launch.json).\n') + + elif error_code == self.api.ADD_BREAKPOINT_LAZY_VALIDATION: + error_msg = 'Waiting for code to be loaded to verify breakpoint.' 
+ + elif error_code == self.api.ADD_BREAKPOINT_INVALID_LINE: + error_msg = 'Breakpoint added to invalid line.' + + else: + # Shouldn't get here. + error_msg = 'Breakpoint not validated (reason unknown -- please report as bug).' + + return pydevd_schema.Breakpoint( + verified=False, id=breakpoint_id, line=result.translated_line, message=error_msg, source=source).to_dict() + else: + return pydevd_schema.Breakpoint( + verified=True, id=breakpoint_id, line=result.translated_line, source=source).to_dict() + + def on_setexceptionbreakpoints_request(self, py_db, request): + ''' + :param SetExceptionBreakpointsRequest request: + ''' + # : :type arguments: SetExceptionBreakpointsArguments + arguments = request.arguments + filters = arguments.filters + exception_options = arguments.exceptionOptions + self.api.remove_all_exception_breakpoints(py_db) + + # Can't set these in the DAP. + condition = None + expression = None + notify_on_first_raise_only = False + + ignore_libraries = 1 if py_db.get_use_libraries_filter() else 0 + + if exception_options: + break_raised = False + break_uncaught = False + + for option in exception_options: + option = ExceptionOptions(**option) + if not option.path: + continue + + # never: never breaks + # + # always: always breaks + # + # unhandled: breaks when exception unhandled + # + # userUnhandled: breaks if the exception is not handled by user code + + notify_on_handled_exceptions = 1 if option.breakMode == 'always' else 0 + notify_on_unhandled_exceptions = 1 if option.breakMode == 'unhandled' else 0 + notify_on_user_unhandled_exceptions = 1 if option.breakMode == 'userUnhandled' else 0 + exception_paths = option.path + break_raised |= notify_on_handled_exceptions + break_uncaught |= notify_on_unhandled_exceptions + + exception_names = [] + if len(exception_paths) == 0: + continue + + elif len(exception_paths) == 1: + if 'Python Exceptions' in exception_paths[0]['names']: + exception_names = ['BaseException'] + + else: + path_iterator = iter(exception_paths) + if 'Python Exceptions' in next(path_iterator)['names']: + for path in path_iterator: + for ex_name in path['names']: + exception_names.append(ex_name) + + for exception_name in exception_names: + self.api.add_python_exception_breakpoint( + py_db, + exception_name, + condition, + expression, + notify_on_handled_exceptions, + notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions, + notify_on_first_raise_only, + ignore_libraries + ) + + else: + break_raised = 'raised' in filters + break_uncaught = 'uncaught' in filters + break_user = 'userUnhandled' in filters + if break_raised or break_uncaught or break_user: + notify_on_handled_exceptions = 1 if break_raised else 0 + notify_on_unhandled_exceptions = 1 if break_uncaught else 0 + notify_on_user_unhandled_exceptions = 1 if break_user else 0 + exception = 'BaseException' + + self.api.add_python_exception_breakpoint( + py_db, + exception, + condition, + expression, + notify_on_handled_exceptions, + notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions, + notify_on_first_raise_only, + ignore_libraries + ) + + if break_raised: + btype = None + if self._options.django_debug: + btype = 'django' + elif self._options.flask_debug: + btype = 'jinja2' + + if btype: + self.api.add_plugins_exception_breakpoint( + py_db, btype, 'BaseException') # Note: Exception name could be anything here. + + # Note: no body required on success. 
+ set_breakpoints_response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, set_breakpoints_response, is_json=True) + + def on_stacktrace_request(self, py_db, request): + ''' + :param StackTraceRequest request: + ''' + # : :type stack_trace_arguments: StackTraceArguments + stack_trace_arguments = request.arguments + thread_id = stack_trace_arguments.threadId + + if stack_trace_arguments.startFrame: + start_frame = int(stack_trace_arguments.startFrame) + else: + start_frame = 0 + + if stack_trace_arguments.levels: + levels = int(stack_trace_arguments.levels) + else: + levels = 0 + + fmt = stack_trace_arguments.format + if hasattr(fmt, 'to_dict'): + fmt = fmt.to_dict() + self.api.request_stack(py_db, request.seq, thread_id, fmt=fmt, start_frame=start_frame, levels=levels) + + def on_exceptioninfo_request(self, py_db, request): + ''' + :param ExceptionInfoRequest request: + ''' + # : :type exception_into_arguments: ExceptionInfoArguments + exception_into_arguments = request.arguments + thread_id = exception_into_arguments.threadId + max_frames = self._options.max_exception_stack_frames + self.api.request_exception_info_json(py_db, request, thread_id, max_frames) + + def on_scopes_request(self, py_db, request): + ''' + Scopes are the top-level items which appear for a frame (so, we receive the frame id + and provide the scopes it has). + + :param ScopesRequest request: + ''' + frame_id = request.arguments.frameId + + variables_reference = frame_id + scopes = [ + Scope('Locals', ScopeRequest(int(variables_reference), 'locals'), False, presentationHint='locals'), + Scope('Globals', ScopeRequest(int(variables_reference), 'globals'), False), + ] + body = ScopesResponseBody(scopes) + scopes_response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, scopes_response, is_json=True) + + def on_evaluate_request(self, py_db, request): + ''' + :param EvaluateRequest request: + ''' + # : :type arguments: EvaluateArguments + arguments = request.arguments + + if arguments.frameId is None: + self.api.request_exec_or_evaluate_json(py_db, request, thread_id='*') + else: + thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference( + arguments.frameId) + + if thread_id is not None: + self.api.request_exec_or_evaluate_json( + py_db, request, thread_id) + else: + body = EvaluateResponseBody('', 0) + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': 'Unable to find thread for evaluation.' + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_setexpression_request(self, py_db, request): + # : :type arguments: SetExpressionArguments + arguments = request.arguments + + thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference( + arguments.frameId) + + if thread_id is not None: + self.api.request_set_expression_json(py_db, request, thread_id) + else: + body = SetExpressionResponseBody('') + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': body, + 'success': False, + 'message': 'Unable to find thread to set expression.' + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_variables_request(self, py_db, request): + ''' + Variables can be asked whenever some place returned a variables reference (so, it + can be a scope gotten from on_scopes_request, the result of some evaluation, etc.). + + Note that in the DAP the variables reference requires a unique int... 
the way this works for + pydevd is that an instance is generated for that specific variable reference and we use its + id(instance) to identify it to make sure all items are unique (and the actual {id->instance} + is added to a dict which is only valid while the thread is suspended and later cleared when + the related thread resumes execution). + + see: SuspendedFramesManager + + :param VariablesRequest request: + ''' + arguments = request.arguments # : :type arguments: VariablesArguments + variables_reference = arguments.variablesReference + + if isinstance(variables_reference, ScopeRequest): + variables_reference = variables_reference.variable_reference + + thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference( + variables_reference) + if thread_id is not None: + self.api.request_get_variable_json(py_db, request, thread_id) + else: + variables = [] + body = VariablesResponseBody(variables) + variables_response = pydevd_base_schema.build_response(request, kwargs={ + 'body': body, + 'success': False, + 'message': 'Unable to find thread to evaluate variable reference.' + }) + return NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + + def on_setvariable_request(self, py_db, request): + arguments = request.arguments # : :type arguments: SetVariableArguments + variables_reference = arguments.variablesReference + + if isinstance(variables_reference, ScopeRequest): + variables_reference = variables_reference.variable_reference + + if arguments.name.startswith('(return) '): + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': SetVariableResponseBody(''), + 'success': False, + 'message': 'Cannot change return value' + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference( + variables_reference) + + if thread_id is not None: + self.api.request_change_variable_json(py_db, request, thread_id) + else: + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': SetVariableResponseBody(''), + 'success': False, + 'message': 'Unable to find thread to evaluate variable reference.' + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_modules_request(self, py_db, request): + modules_manager = py_db.cmd_factory.modules_manager # : :type modules_manager: ModulesManager + modules_info = modules_manager.get_modules_info() + body = ModulesResponseBody(modules_info) + variables_response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, variables_response, is_json=True) + + def on_source_request(self, py_db, request): + ''' + :param SourceRequest request: + ''' + source_reference = request.arguments.sourceReference + server_filename = None + content = None + + if source_reference != 0: + server_filename = pydevd_file_utils.get_server_filename_from_source_reference(source_reference) + if not server_filename: + server_filename = pydevd_file_utils.get_source_reference_filename_from_linecache(source_reference) + + if server_filename: + # Try direct file access first - it's much faster when available. + try: + with open(server_filename, 'r') as stream: + content = stream.read() + except: + pass + + if content is None: + # File might not exist at all, or we might not have a permission to read it, + # but it might also be inside a zipfile, or an IPython cell. In this case, + # linecache might still be able to retrieve the source. 
+ lines = (linecache.getline(server_filename, i) for i in itertools.count(1)) + lines = itertools.takewhile(bool, lines) # empty lines are '\n', EOF is '' + + # If we didn't get at least one line back, reset it to None so that it's + # reported as error below, and not as an empty file. + content = ''.join(lines) or None + + if content is None: + frame_id = pydevd_file_utils.get_frame_id_from_source_reference(source_reference) + pydev_log.debug('Found frame id: %s for source reference: %s', frame_id, source_reference) + if frame_id is not None: + try: + content = self.api.get_decompiled_source_from_frame_id(py_db, frame_id) + except Exception: + pydev_log.exception('Error getting source for frame id: %s', frame_id) + content = None + + body = SourceResponseBody(content or '') + response_args = {'body': body} + + if content is None: + if source_reference == 0: + message = 'Source unavailable' + elif server_filename: + message = 'Unable to retrieve source for %s' % (server_filename,) + else: + message = 'Invalid sourceReference %d' % (source_reference,) + response_args.update({'success': False, 'message': message}) + + response = pydevd_base_schema.build_response(request, kwargs=response_args) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_gototargets_request(self, py_db, request): + path = request.arguments.source.path + line = request.arguments.line + target_id = self._goto_targets_map.obtain_key((path, line)) + target = { + 'id': target_id, + 'label': '%s:%s' % (path, line), + 'line': line + } + body = GotoTargetsResponseBody(targets=[target]) + response_args = {'body': body} + response = pydevd_base_schema.build_response(request, kwargs=response_args) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_goto_request(self, py_db, request): + target_id = int(request.arguments.targetId) + thread_id = request.arguments.threadId + try: + path, line = self._goto_targets_map.obtain_value(target_id) + except KeyError: + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': {}, + 'success': False, + 'message': 'Unknown goto target id: %d' % (target_id,), + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + self.api.request_set_next(py_db, request.seq, thread_id, CMD_SET_NEXT_STATEMENT, path, line, '*') + # See 'NetCommandFactoryJson.make_set_next_stmnt_status_message' for response + return None + + def on_setdebuggerproperty_request(self, py_db, request): + args = request.arguments # : :type args: SetDebuggerPropertyArguments + if args.ideOS is not None: + self.api.set_ide_os(args.ideOS) + + if args.dontTraceStartPatterns is not None and args.dontTraceEndPatterns is not None: + start_patterns = tuple(args.dontTraceStartPatterns) + end_patterns = tuple(args.dontTraceEndPatterns) + self.api.set_dont_trace_start_end_patterns(py_db, start_patterns, end_patterns) + + if args.skipSuspendOnBreakpointException is not None: + py_db.skip_suspend_on_breakpoint_exception = tuple( + get_exception_class(x) for x in args.skipSuspendOnBreakpointException) + + if args.skipPrintBreakpointException is not None: + py_db.skip_print_breakpoint_exception = tuple( + get_exception_class(x) for x in args.skipPrintBreakpointException) + + if args.multiThreadsSingleNotification is not None: + py_db.multi_threads_single_notification = args.multiThreadsSingleNotification + + # TODO: Support other common settings. Note that not all of these might be relevant to python. 
+ # JustMyCodeStepping: 0 or 1 + # AllowOutOfProcessSymbols: 0 or 1 + # DisableJITOptimization: 0 or 1 + # InterpreterOptions: 0 or 1 + # StopOnExceptionCrossingManagedBoundary: 0 or 1 + # WarnIfNoUserCodeOnLaunch: 0 or 1 + # EnableStepFiltering: true of false + + response = pydevd_base_schema.build_response(request, kwargs={'body': {}}) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_pydevdsysteminfo_request(self, py_db, request): + try: + pid = os.getpid() + except AttributeError: + pid = None + + # It's possible to have the ppid reported from args. In this case, use that instead of the + # real ppid (athough we're using `ppid`, what we want in meaning is the `launcher_pid` -- + # so, if a python process is launched from another python process, consider that process the + # parent and not any intermediary stubs). + + ppid = py_db.get_arg_ppid() or self.api.get_ppid() + + try: + impl_desc = platform.python_implementation() + except AttributeError: + impl_desc = PY_IMPL_NAME + + py_info = pydevd_schema.PydevdPythonInfo( + version=PY_VERSION_STR, + implementation=pydevd_schema.PydevdPythonImplementationInfo( + name=PY_IMPL_NAME, + version=PY_IMPL_VERSION_STR, + description=impl_desc, + ) + ) + platform_info = pydevd_schema.PydevdPlatformInfo(name=sys.platform) + process_info = pydevd_schema.PydevdProcessInfo( + pid=pid, + ppid=ppid, + executable=sys.executable, + bitness=64 if IS_64BIT_PROCESS else 32, + ) + pydevd_info = pydevd_schema.PydevdInfo( + usingCython=USING_CYTHON, + usingFrameEval=USING_FRAME_EVAL, + ) + body = { + 'python': py_info, + 'platform': platform_info, + 'process': process_info, + 'pydevd': pydevd_info, + } + response = pydevd_base_schema.build_response(request, kwargs={'body': body}) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + def on_setpydevdsourcemap_request(self, py_db, request): + args = request.arguments # : :type args: SetPydevdSourceMapArguments + SourceMappingEntry = self.api.SourceMappingEntry + + path = args.source.path + source_maps = args.pydevdSourceMaps + # : :type source_map: PydevdSourceMap + new_mappings = [ + SourceMappingEntry( + source_map['line'], + source_map['endLine'], + source_map['runtimeLine'], + self.api.filename_to_str(source_map['runtimeSource']['path']) + ) for source_map in source_maps + ] + + error_msg = self.api.set_source_mapping(py_db, path, new_mappings) + if error_msg: + response = pydevd_base_schema.build_response( + request, + kwargs={ + 'body': {}, + 'success': False, + 'message': error_msg, + }) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + + response = pydevd_base_schema.build_response(request) + return NetCommand(CMD_RETURN, 0, response, is_json=True) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_referrers.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_referrers.py new file mode 100644 index 0000000..c7e1bfa --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_referrers.py @@ -0,0 +1,257 @@ +import sys +from _pydevd_bundle import pydevd_xml +from os.path import basename +from _pydev_bundle import pydev_log +from urllib.parse import unquote_plus +from _pydevd_bundle.pydevd_constants import IS_PY311_OR_GREATER + + +#=================================================================================================== +# print_var_node +#=================================================================================================== +def 
print_var_node(xml_node, stream): + name = xml_node.getAttribute('name') + value = xml_node.getAttribute('value') + val_type = xml_node.getAttribute('type') + + found_as = xml_node.getAttribute('found_as') + stream.write('Name: ') + stream.write(unquote_plus(name)) + stream.write(', Value: ') + stream.write(unquote_plus(value)) + stream.write(', Type: ') + stream.write(unquote_plus(val_type)) + if found_as: + stream.write(', Found as: %s' % (unquote_plus(found_as),)) + stream.write('\n') + + +#=================================================================================================== +# print_referrers +#=================================================================================================== +def print_referrers(obj, stream=None): + if stream is None: + stream = sys.stdout + result = get_referrer_info(obj) + from xml.dom.minidom import parseString + dom = parseString(result) + + xml = dom.getElementsByTagName('xml')[0] + for node in xml.childNodes: + if node.nodeType == node.TEXT_NODE: + continue + + if node.localName == 'for': + stream.write('Searching references for: ') + for child in node.childNodes: + if child.nodeType == node.TEXT_NODE: + continue + print_var_node(child, stream) + + elif node.localName == 'var': + stream.write('Referrer found: ') + print_var_node(node, stream) + + else: + sys.stderr.write('Unhandled node: %s\n' % (node,)) + + return result + + +#=================================================================================================== +# get_referrer_info +#=================================================================================================== +def get_referrer_info(searched_obj): + DEBUG = 0 + if DEBUG: + sys.stderr.write('Getting referrers info.\n') + try: + try: + if searched_obj is None: + ret = ['\n'] + + ret.append('\n') + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Skipping getting referrers for None', + additional_in_xml=' id="%s"' % (id(searched_obj),))) + ret.append('\n') + ret.append('') + ret = ''.join(ret) + return ret + + obj_id = id(searched_obj) + + try: + if DEBUG: + sys.stderr.write('Getting referrers...\n') + import gc + referrers = gc.get_referrers(searched_obj) + except: + pydev_log.exception() + ret = ['\n'] + + ret.append('\n') + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Exception raised while trying to get_referrers.', + additional_in_xml=' id="%s"' % (id(searched_obj),))) + ret.append('\n') + ret.append('') + ret = ''.join(ret) + return ret + + if DEBUG: + sys.stderr.write('Found %s referrers.\n' % (len(referrers),)) + + curr_frame = sys._getframe() + frame_type = type(curr_frame) + + # Ignore this frame and any caller frame of this frame + + ignore_frames = {} # Should be a set, but it's not available on all python versions. + while curr_frame is not None: + if basename(curr_frame.f_code.co_filename).startswith('pydev'): + ignore_frames[curr_frame] = 1 + curr_frame = curr_frame.f_back + + ret = ['\n'] + + ret.append('\n') + if DEBUG: + sys.stderr.write('Searching Referrers of obj with id="%s"\n' % (obj_id,)) + + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Referrers of obj with id="%s"' % (obj_id,))) + ret.append('\n') + + curr_frame = sys._getframe() + all_objects = None + + for r in referrers: + try: + if r in ignore_frames: + continue # Skip the references we may add ourselves + except: + pass # Ok: unhashable type checked... 
+ + if r is referrers: + continue + + if r is curr_frame.f_locals: + continue + + r_type = type(r) + r_id = str(id(r)) + + representation = str(r_type) + + found_as = '' + if r_type == frame_type: + if DEBUG: + sys.stderr.write('Found frame referrer: %r\n' % (r,)) + for key, val in r.f_locals.items(): + if val is searched_obj: + found_as = key + break + + elif r_type == dict: + if DEBUG: + sys.stderr.write('Found dict referrer: %r\n' % (r,)) + + # Try to check if it's a value in the dict (and under which key it was found) + for key, val in r.items(): + if val is searched_obj: + found_as = key + if DEBUG: + sys.stderr.write(' Found as %r in dict\n' % (found_as,)) + break + + # Ok, there's one annoying thing: many times we find it in a dict from an instance, + # but with this we don't directly have the class, only the dict, so, to workaround that + # we iterate over all reachable objects ad check if one of those has the given dict. + if all_objects is None: + all_objects = gc.get_objects() + + for x in all_objects: + try: + if getattr(x, '__dict__', None) is r: + r = x + r_type = type(x) + r_id = str(id(r)) + representation = str(r_type) + break + except: + pass # Just ignore any error here (i.e.: ReferenceError, etc.) + + elif r_type in (tuple, list): + if DEBUG: + sys.stderr.write('Found tuple referrer: %r\n' % (r,)) + + for i, x in enumerate(r): + if x is searched_obj: + found_as = '%s[%s]' % (r_type.__name__, i) + if DEBUG: + sys.stderr.write(' Found as %s in tuple: \n' % (found_as,)) + break + + elif IS_PY311_OR_GREATER: + # Up to Python 3.10, gc.get_referrers for an instance actually returned the + # object.__dict__, but on Python 3.11 it returns the actual object, so, + # handling is a bit easier (we don't need the workaround from the dict + # case to find the actual instance, we just need to find the attribute name). + if DEBUG: + sys.stderr.write('Found dict referrer: %r\n' % (r,)) + + dct = getattr(r, '__dict__', None) + if dct: + # Try to check if it's a value in the dict (and under which key it was found) + for key, val in dct.items(): + if val is searched_obj: + found_as = key + if DEBUG: + sys.stderr.write(' Found as %r in object instance\n' % (found_as,)) + break + + if found_as: + if not isinstance(found_as, str): + found_as = str(found_as) + found_as = ' found_as="%s"' % (pydevd_xml.make_valid_xml_value(found_as),) + + ret.append(pydevd_xml.var_to_xml( + r, + representation, + additional_in_xml=' id="%s"%s' % (r_id, found_as))) + finally: + if DEBUG: + sys.stderr.write('Done searching for references.\n') + + # If we have any exceptions, don't keep dangling references from this frame to any of our objects. 
+ all_objects = None + referrers = None + searched_obj = None + r = None + x = None + key = None + val = None + curr_frame = None + ignore_frames = None + except: + pydev_log.exception() + ret = ['\n'] + + ret.append('\n') + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Error getting referrers for:', + additional_in_xml=' id="%s"' % (id(searched_obj),))) + ret.append('\n') + ret.append('') + ret = ''.join(ret) + return ret + + ret.append('') + ret = ''.join(ret) + return ret + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_reload.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_reload.py new file mode 100644 index 0000000..507e73b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_reload.py @@ -0,0 +1,433 @@ +""" +Based on the python xreload. + +Changes +====================== + +1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace, +load a new version of it and update only some of the things we can inplace. That way, we don't break +things such as singletons or end up with a second representation of the same class in memory. + +2. If we find it to be a __metaclass__, we try to update it as a regular class. + +3. We don't remove old attributes (and leave them lying around even if they're no longer used). + +4. Reload hooks were changed + +These changes make it more stable, especially in the common case (where in a debug session only the +contents of a function are changed), besides providing flexibility for users that want to extend +on it. + + + +Hooks +====================== + +Classes/modules can be specially crafted to work with the reload (so that it can, for instance, +update some constant which was changed). + +1. To participate in the change of some attribute: + + In a module: + + __xreload_old_new__(namespace, name, old, new) + + in a class: + + @classmethod + __xreload_old_new__(cls, name, old, new) + + A class or module may include a method called '__xreload_old_new__' which is called when we're + unable to reload a given attribute. + + + +2. To do something after the whole reload is finished: + + In a module: + + __xreload_after_reload_update__(namespace): + + In a class: + + @classmethod + __xreload_after_reload_update__(cls): + + + A class or module may include a method called '__xreload_after_reload_update__' which is called + after the reload finishes. + + +Important: when providing a hook, always use the namespace or cls provided and not anything in the global +namespace, as the global namespace are only temporarily created during the reload and may not reflect the +actual application state (while the cls and namespace passed are). + + +Current limitations +====================== + + +- Attributes/constants are added, but not changed (so singletons and the application state is not + broken -- use provided hooks to workaround it). + +- Code using metaclasses may not always work. + +- Functions and methods using decorators (other than classmethod and staticmethod) are not handled + correctly. + +- Renamings are not handled correctly. + +- Dependent modules are not reloaded. + +- New __slots__ can't be added to existing classes. 
+ + +Info +====================== + +Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py +Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later) + +Interesting alternative: https://code.google.com/p/reimport/ + +Alternative to reload(). + +This works by executing the module in a scratch namespace, and then patching classes, methods and +functions in place. This avoids the need to patch instances. New objects are copied into the +target namespace. + +""" + +from _pydev_bundle.pydev_imports import execfile +from _pydevd_bundle import pydevd_dont_trace +import types +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_constants import get_global_debugger + +NO_DEBUG = 0 +LEVEL1 = 1 +LEVEL2 = 2 + +DEBUG = NO_DEBUG + + +def write_err(*args): + py_db = get_global_debugger() + if py_db is not None: + new_lst = [] + for a in args: + new_lst.append(str(a)) + + msg = ' '.join(new_lst) + s = 'code reload: %s\n' % (msg,) + cmd = py_db.cmd_factory.make_io_message(s, 2) + if py_db.writer is not None: + py_db.writer.add_command(cmd) + + +def notify_info0(*args): + write_err(*args) + + +def notify_info(*args): + if DEBUG >= LEVEL1: + write_err(*args) + + +def notify_info2(*args): + if DEBUG >= LEVEL2: + write_err(*args) + + +def notify_error(*args): + write_err(*args) + + +#======================================================================================================================= +# code_objects_equal +#======================================================================================================================= +def code_objects_equal(code0, code1): + for d in dir(code0): + if d.startswith('_') or 'line' in d or d in ('replace', 'co_positions', 'co_qualname'): + continue + if getattr(code0, d) != getattr(code1, d): + return False + return True + + +#======================================================================================================================= +# xreload +#======================================================================================================================= +def xreload(mod): + """Reload a module in place, updating classes, methods and functions. + + mod: a module object + + Returns a boolean indicating whether a change was done. + """ + r = Reload(mod) + r.apply() + found_change = r.found_change + r = None + pydevd_dont_trace.clear_trace_filter_cache() + return found_change + +# This isn't actually used... Initially I planned to reload variables which are immutable on the +# namespace, but this can destroy places where we're saving state, which may not be what we want, +# so, we're being conservative and giving the user hooks if he wants to do a reload. +# +# immutable_types = [int, str, float, tuple] #That should be common to all Python versions +# +# for name in 'long basestr unicode frozenset'.split(): +# try: +# immutable_types.append(__builtins__[name]) +# except: +# pass #Just ignore: not all python versions are created equal. 
+# immutable_types = tuple(immutable_types) + + +#======================================================================================================================= +# Reload +#======================================================================================================================= +class Reload: + + def __init__(self, mod, mod_name=None, mod_filename=None): + self.mod = mod + if mod_name: + self.mod_name = mod_name + else: + self.mod_name = mod.__name__ if mod is not None else None + + if mod_filename: + self.mod_filename = mod_filename + else: + self.mod_filename = mod.__file__ if mod is not None else None + + self.found_change = False + + def apply(self): + mod = self.mod + self._on_finish_callbacks = [] + try: + # Get the module namespace (dict) early; this is part of the type check + modns = mod.__dict__ + + # Execute the code. We copy the module dict to a temporary; then + # clear the module dict; then execute the new code in the module + # dict; then swap things back and around. This trick (due to + # Glyph Lefkowitz) ensures that the (readonly) __globals__ + # attribute of methods and functions is set to the correct dict + # object. + new_namespace = modns.copy() + new_namespace.clear() + if self.mod_filename: + new_namespace["__file__"] = self.mod_filename + try: + new_namespace["__builtins__"] = __builtins__ + except NameError: + raise # Ok if not there. + + if self.mod_name: + new_namespace["__name__"] = self.mod_name + if new_namespace["__name__"] == '__main__': + # We do this because usually the __main__ starts-up the program, guarded by + # the if __name__ == '__main__', but we don't want to start the program again + # on a reload. + new_namespace["__name__"] = '__main_reloaded__' + + execfile(self.mod_filename, new_namespace, new_namespace) + # Now we get to the hard part + oldnames = set(modns) + newnames = set(new_namespace) + + # Create new tokens (note: not deleting existing) + for name in newnames - oldnames: + notify_info0('Added:', name, 'to namespace') + self.found_change = True + modns[name] = new_namespace[name] + + # Update in-place what we can + for name in oldnames & newnames: + self._update(modns, name, modns[name], new_namespace[name]) + + self._handle_namespace(modns) + + for c in self._on_finish_callbacks: + c() + del self._on_finish_callbacks[:] + except: + pydev_log.exception() + + def _handle_namespace(self, namespace, is_class_namespace=False): + on_finish = None + if is_class_namespace: + xreload_after_update = getattr(namespace, '__xreload_after_reload_update__', None) + if xreload_after_update is not None: + self.found_change = True + on_finish = lambda: xreload_after_update() + + elif '__xreload_after_reload_update__' in namespace: + xreload_after_update = namespace['__xreload_after_reload_update__'] + self.found_change = True + on_finish = lambda: xreload_after_update(namespace) + + if on_finish is not None: + # If a client wants to know about it, give him a chance. + self._on_finish_callbacks.append(on_finish) + + def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False): + """Update oldobj, if possible in place, with newobj. + + If oldobj is immutable, this simply returns newobj. 
+ + Args: + oldobj: the object to be updated + newobj: the object used as the source for the update + """ + try: + notify_info2('Updating: ', oldobj) + if oldobj is newobj: + # Probably something imported + return + + if type(oldobj) is not type(newobj): + # Cop-out: if the type changed, give up + if name not in ('__builtins__',): + notify_error('Type of: %s (old: %s != new: %s) changed... Skipping.' % (name, type(oldobj), type(newobj))) + return + + if isinstance(newobj, types.FunctionType): + self._update_function(oldobj, newobj) + return + + if isinstance(newobj, types.MethodType): + self._update_method(oldobj, newobj) + return + + if isinstance(newobj, classmethod): + self._update_classmethod(oldobj, newobj) + return + + if isinstance(newobj, staticmethod): + self._update_staticmethod(oldobj, newobj) + return + + if hasattr(types, 'ClassType'): + classtype = (types.ClassType, type) # object is not instance of types.ClassType. + else: + classtype = type + + if isinstance(newobj, classtype): + self._update_class(oldobj, newobj) + return + + # New: dealing with metaclasses. + if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__: + self._update_class(oldobj, newobj) + return + + if namespace is not None: + # Check for the `__xreload_old_new__` protocol (don't even compare things + # as even doing a comparison may break things -- see: https://github.com/microsoft/debugpy/issues/615). + xreload_old_new = None + if is_class_namespace: + xreload_old_new = getattr(namespace, '__xreload_old_new__', None) + if xreload_old_new is not None: + self.found_change = True + xreload_old_new(name, oldobj, newobj) + + elif '__xreload_old_new__' in namespace: + xreload_old_new = namespace['__xreload_old_new__'] + xreload_old_new(namespace, name, oldobj, newobj) + self.found_change = True + + # Too much information to the user... + # else: + # notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,)) + + except: + notify_error('Exception found when updating %s. Proceeding for other items.' % (name,)) + pydev_log.exception() + + # All of the following functions have the same signature as _update() + + def _update_function(self, oldfunc, newfunc): + """Update a function object.""" + oldfunc.__doc__ = newfunc.__doc__ + oldfunc.__dict__.update(newfunc.__dict__) + + try: + newfunc.__code__ + attr_name = '__code__' + except AttributeError: + newfunc.func_code + attr_name = 'func_code' + + old_code = getattr(oldfunc, attr_name) + new_code = getattr(newfunc, attr_name) + if not code_objects_equal(old_code, new_code): + notify_info0('Updated function code:', oldfunc) + setattr(oldfunc, attr_name, new_code) + self.found_change = True + + try: + oldfunc.__defaults__ = newfunc.__defaults__ + except AttributeError: + oldfunc.func_defaults = newfunc.func_defaults + + return oldfunc + + def _update_method(self, oldmeth, newmeth): + """Update a method object.""" + # XXX What if im_func is not a function? 
+ if hasattr(oldmeth, 'im_func') and hasattr(newmeth, 'im_func'): + self._update(None, None, oldmeth.im_func, newmeth.im_func) + elif hasattr(oldmeth, '__func__') and hasattr(newmeth, '__func__'): + self._update(None, None, oldmeth.__func__, newmeth.__func__) + return oldmeth + + def _update_class(self, oldclass, newclass): + """Update a class object.""" + olddict = oldclass.__dict__ + newdict = newclass.__dict__ + + oldnames = set(olddict) + newnames = set(newdict) + + for name in newnames - oldnames: + setattr(oldclass, name, newdict[name]) + notify_info0('Added:', name, 'to', oldclass) + self.found_change = True + + # Note: not removing old things... + # for name in oldnames - newnames: + # notify_info('Removed:', name, 'from', oldclass) + # delattr(oldclass, name) + + for name in (oldnames & newnames) - set(['__dict__', '__doc__']): + self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True) + + old_bases = getattr(oldclass, '__bases__', None) + new_bases = getattr(newclass, '__bases__', None) + if str(old_bases) != str(new_bases): + notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,)) + + self._handle_namespace(oldclass, is_class_namespace=True) + + def _update_classmethod(self, oldcm, newcm): + """Update a classmethod update.""" + # While we can't modify the classmethod object itself (it has no + # mutable attributes), we *can* extract the underlying function + # (by calling __get__(), which returns a method object) and update + # it in-place. We don't have the class available to pass to + # __get__() but any object except None will do. + self._update(None, None, oldcm.__get__(0), newcm.__get__(0)) + + def _update_staticmethod(self, oldsm, newsm): + """Update a staticmethod update.""" + # While we can't modify the staticmethod object itself (it has no + # mutable attributes), we *can* extract the underlying function + # (by calling __get__(), which returns it) and update it in-place. + # We don't have the class available to pass to __get__() but any + # object except None will do. + self._update(None, None, oldsm.__get__(0), newsm.__get__(0)) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_resolver.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_resolver.py new file mode 100644 index 0000000..ac7aa10 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_resolver.py @@ -0,0 +1,723 @@ +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_utils import hasattr_checked, DAPGrouper, Timer +from io import StringIO +import traceback +from os.path import basename + +from functools import partial +from _pydevd_bundle.pydevd_constants import IS_PY36_OR_GREATER, \ + MethodWrapperType, RETURN_VALUES_DICT, DebugInfoHolder, IS_PYPY, GENERATED_LEN_ATTR_NAME +from _pydevd_bundle.pydevd_safe_repr import SafeRepr + +# Note: 300 is already a lot to see in the outline (after that the user should really use the shell to get things) +# and this also means we'll pass less information to the client side (which makes debugging faster). +MAX_ITEMS_TO_HANDLE = 300 + +TOO_LARGE_MSG = 'Too large to show contents. 
Max items to show: ' + str(MAX_ITEMS_TO_HANDLE) +TOO_LARGE_ATTR = 'Unable to handle:' + + +#======================================================================================================================= +# UnableToResolveVariableException +#======================================================================================================================= +class UnableToResolveVariableException(Exception): + pass + + +try: + from collections import OrderedDict +except: + OrderedDict = dict + +try: + import java.lang # @UnresolvedImport +except: + pass + +#======================================================================================================================= +# See: pydevd_extension_api module for resolver interface +#======================================================================================================================= + + +def sorted_attributes_key(attr_name): + if attr_name.startswith('__'): + if attr_name.endswith('__'): + # __ double under before and after __ + return (3, attr_name) + else: + # __ double under before + return (2, attr_name) + elif attr_name.startswith('_'): + # _ single under + return (1, attr_name) + else: + # Regular (Before anything) + return (0, attr_name) + + +#======================================================================================================================= +# DefaultResolver +#======================================================================================================================= +class DefaultResolver: + ''' + DefaultResolver is the class that'll actually resolve how to show some variable. + ''' + + def resolve(self, var, attribute): + return getattr(var, attribute) + + def get_contents_debug_adapter_protocol(self, obj, fmt=None): + if MethodWrapperType: + dct, used___dict__ = self._get_py_dictionary(obj) + else: + dct = self._get_jy_dictionary(obj)[0] + + lst = sorted(dct.items(), key=lambda tup: sorted_attributes_key(tup[0])) + if used___dict__: + eval_name = '.__dict__[%s]' + else: + eval_name = '.%s' + + ret = [] + for attr_name, attr_value in lst: + entry = (attr_name, attr_value, eval_name % attr_name) + ret.append(entry) + + return ret + + def get_dictionary(self, var, names=None, used___dict__=False): + if MethodWrapperType: + return self._get_py_dictionary(var, names, used___dict__=used___dict__)[0] + else: + return self._get_jy_dictionary(var)[0] + + def _get_jy_dictionary(self, obj): + ret = {} + found = java.util.HashMap() + + original = obj + if hasattr_checked(obj, '__class__') and obj.__class__ == java.lang.Class: + + # get info about superclasses + classes = [] + classes.append(obj) + c = obj.getSuperclass() + while c != None: + classes.append(c) + c = c.getSuperclass() + + # get info about interfaces + interfs = [] + for obj in classes: + interfs.extend(obj.getInterfaces()) + classes.extend(interfs) + + # now is the time when we actually get info on the declared methods and fields + for obj in classes: + + declaredMethods = obj.getDeclaredMethods() + declaredFields = obj.getDeclaredFields() + for i in range(len(declaredMethods)): + name = declaredMethods[i].getName() + ret[name] = declaredMethods[i].toString() + found.put(name, 1) + + for i in range(len(declaredFields)): + name = declaredFields[i].getName() + found.put(name, 1) + # if declaredFields[i].isAccessible(): + declaredFields[i].setAccessible(True) + # ret[name] = declaredFields[i].get( declaredFields[i] ) + try: + ret[name] = declaredFields[i].get(original) + except: + ret[name] = 
declaredFields[i].toString() + + # this simple dir does not always get all the info, that's why we have the part before + # (e.g.: if we do a dir on String, some methods that are from other interfaces such as + # charAt don't appear) + try: + d = dir(original) + for name in d: + if found.get(name) != 1: + ret[name] = getattr(original, name) + except: + # sometimes we're unable to do a dir + pass + + return ret + + def get_names(self, var): + used___dict__ = False + try: + names = dir(var) + except Exception: + names = [] + if not names: + if hasattr_checked(var, '__dict__'): + names = list(var.__dict__) + used___dict__ = True + return names, used___dict__ + + def _get_py_dictionary(self, var, names=None, used___dict__=False): + ''' + :return tuple(names, used___dict__), where used___dict__ means we have to access + using obj.__dict__[name] instead of getattr(obj, name) + ''' + + # On PyPy we never show functions. This is because of a corner case where PyPy becomes + # absurdly slow -- it takes almost half a second to introspect a single numpy function (so, + # the related test, "test_case_16_resolve_numpy_array", times out... this probably isn't + # specific to numpy, but to any library where the CPython bridge is used, but as we + # can't be sure in the debugger, we play it safe and don't show it at all). + filter_function = IS_PYPY + + if not names: + names, used___dict__ = self.get_names(var) + d = {} + + # Be aware that the order in which the filters are applied attempts to + # optimize the operation by removing as many items as possible in the + # first filters, leaving fewer items for later filters + + timer = Timer() + cls = type(var) + for name in names: + try: + name_as_str = name + if name_as_str.__class__ != str: + name_as_str = '%r' % (name_as_str,) + + if not used___dict__: + attr = getattr(var, name) + else: + attr = var.__dict__[name] + + # filter functions? + if filter_function: + if inspect.isroutine(attr) or isinstance(attr, MethodWrapperType): + continue + except: + # if some error occurs getting it, let's put it to the user. + strIO = StringIO() + traceback.print_exc(file=strIO) + attr = strIO.getvalue() + + finally: + timer.report_if_getting_attr_slow(cls, name_as_str) + + d[name_as_str] = attr + + return d, used___dict__ + + +class DAPGrouperResolver: + + def get_contents_debug_adapter_protocol(self, obj, fmt=None): + return obj.get_contents_debug_adapter_protocol() + + +_basic_immutable_types = (int, float, complex, str, bytes, type(None), bool, frozenset) + + +def _does_obj_repr_evaluate_to_obj(obj): + ''' + If obj is an object where evaluating its representation leads to + the same object, return True, otherwise, return False. 
+ ''' + try: + if isinstance(obj, tuple): + for o in obj: + if not _does_obj_repr_evaluate_to_obj(o): + return False + return True + else: + return isinstance(obj, _basic_immutable_types) + except: + return False + + +#======================================================================================================================= +# DictResolver +#======================================================================================================================= +class DictResolver: + + sort_keys = not IS_PY36_OR_GREATER + + def resolve(self, dct, key): + if key in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR): + return None + + if '(' not in key: + # we have to treat that because the dict resolver is also used to directly resolve the global and local + # scopes (which already have the items directly) + try: + return dct[key] + except: + return getattr(dct, key) + + # ok, we have to iterate over the items to find the one that matches the id, because that's the only way + # to actually find the reference from the string we have before. + expected_id = int(key.split('(')[-1][:-1]) + for key, val in dct.items(): + if id(key) == expected_id: + return val + + raise UnableToResolveVariableException() + + def key_to_str(self, key, fmt=None): + if fmt is not None: + if fmt.get('hex', False): + safe_repr = SafeRepr() + safe_repr.convert_to_hex = True + return safe_repr(key) + return '%r' % (key,) + + def init_dict(self): + return {} + + def get_contents_debug_adapter_protocol(self, dct, fmt=None): + ''' + This method is to be used in the case where the variables are all saved by its id (and as + such don't need to have the `resolve` method called later on, so, keys don't need to + embed the reference in the key). + + Note that the return should be ordered. + + :return list(tuple(name:str, value:object, evaluateName:str)) + ''' + ret = [] + + i = 0 + + found_representations = set() + + for key, val in dct.items(): + i += 1 + key_as_str = self.key_to_str(key, fmt) + + if key_as_str not in found_representations: + found_representations.add(key_as_str) + else: + # If the key would be a duplicate, add the key id (otherwise + # VSCode won't show all keys correctly). + # See: https://github.com/microsoft/debugpy/issues/148 + key_as_str = '%s (id: %s)' % (key_as_str, id(key)) + found_representations.add(key_as_str) + + if _does_obj_repr_evaluate_to_obj(key): + s = self.key_to_str(key) # do not format the key + eval_key_str = '[%s]' % (s,) + else: + eval_key_str = None + ret.append((key_as_str, val, eval_key_str)) + if i > MAX_ITEMS_TO_HANDLE: + ret.append((TOO_LARGE_ATTR, TOO_LARGE_MSG, None)) + break + + # in case the class extends built-in type and has some additional fields + from_default_resolver = defaultResolver.get_contents_debug_adapter_protocol(dct, fmt) + + if from_default_resolver: + ret = from_default_resolver + ret + + if self.sort_keys: + ret = sorted(ret, key=lambda tup: sorted_attributes_key(tup[0])) + + ret.append((GENERATED_LEN_ATTR_NAME, len(dct), partial(_apply_evaluate_name, evaluate_name='len(%s)'))) + return ret + + def get_dictionary(self, dct): + ret = self.init_dict() + + i = 0 + for key, val in dct.items(): + i += 1 + # we need to add the id because otherwise we cannot find the real object to get its contents later on. 
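# Illustrative sketch only (hypothetical dict and id values), not part of the vendored
# module: it shows the round trip the comment above describes -- the key is rendered
# together with its id(), and resolve() later parses that id back out of the trailing
# "(...)" to recover the original key object.
d = {'a': 1}
key_obj = next(iter(d))
rendered = '%s (%s)' % (repr(key_obj), id(key_obj))   # e.g. "'a' (140234...)"
expected_id = int(rendered.split('(')[-1][:-1])       # same parsing used by resolve()
match = next(k for k in d if id(k) == expected_id)    # scan for the key with that id
assert match is key_obj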
+ key = '%s (%s)' % (self.key_to_str(key), id(key)) + ret[key] = val + if i > MAX_ITEMS_TO_HANDLE: + ret[TOO_LARGE_ATTR] = TOO_LARGE_MSG + break + + # in case if the class extends built-in type and has some additional fields + additional_fields = defaultResolver.get_dictionary(dct) + ret.update(additional_fields) + ret[GENERATED_LEN_ATTR_NAME] = len(dct) + return ret + + +def _apply_evaluate_name(parent_name, evaluate_name): + return evaluate_name % (parent_name,) + + +#======================================================================================================================= +# TupleResolver +#======================================================================================================================= +class TupleResolver: # to enumerate tuples and lists + + def resolve(self, var, attribute): + ''' + @param var: that's the original attribute + @param attribute: that's the key passed in the dict (as a string) + ''' + if attribute in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR): + return None + try: + return var[int(attribute)] + except: + return getattr(var, attribute) + + def get_contents_debug_adapter_protocol(self, lst, fmt=None): + ''' + This method is to be used in the case where the variables are all saved by its id (and as + such don't need to have the `resolve` method called later on, so, keys don't need to + embed the reference in the key). + + Note that the return should be ordered. + + :return list(tuple(name:str, value:object, evaluateName:str)) + ''' + l = len(lst) + ret = [] + + format_str = '%0' + str(int(len(str(l - 1)))) + 'd' + if fmt is not None and fmt.get('hex', False): + format_str = '0x%0' + str(int(len(hex(l).lstrip('0x')))) + 'x' + + for i, item in enumerate(lst): + ret.append((format_str % i, item, '[%s]' % i)) + + if i > MAX_ITEMS_TO_HANDLE: + ret.append((TOO_LARGE_ATTR, TOO_LARGE_MSG, None)) + break + + # Needed in case the class extends the built-in type and has some additional fields. + from_default_resolver = defaultResolver.get_contents_debug_adapter_protocol(lst, fmt=fmt) + if from_default_resolver: + ret = from_default_resolver + ret + + ret.append((GENERATED_LEN_ATTR_NAME, len(lst), partial(_apply_evaluate_name, evaluate_name='len(%s)'))) + return ret + + def get_dictionary(self, var, fmt={}): + l = len(var) + d = {} + + format_str = '%0' + str(int(len(str(l - 1)))) + 'd' + if fmt is not None and fmt.get('hex', False): + format_str = '0x%0' + str(int(len(hex(l).lstrip('0x')))) + 'x' + + for i, item in enumerate(var): + d[format_str % i] = item + + if i > MAX_ITEMS_TO_HANDLE: + d[TOO_LARGE_ATTR] = TOO_LARGE_MSG + break + + # in case if the class extends built-in type and has some additional fields + additional_fields = defaultResolver.get_dictionary(var) + d.update(additional_fields) + d[GENERATED_LEN_ATTR_NAME] = len(var) + return d + + +#======================================================================================================================= +# SetResolver +#======================================================================================================================= +class SetResolver: + ''' + Resolves a set as dict id(object)->object + ''' + + def get_contents_debug_adapter_protocol(self, obj, fmt=None): + ret = [] + + for i, item in enumerate(obj): + ret.append((str(id(item)), item, None)) + + if i > MAX_ITEMS_TO_HANDLE: + ret.append((TOO_LARGE_ATTR, TOO_LARGE_MSG, None)) + break + + # Needed in case the class extends the built-in type and has some additional fields. 
+ from_default_resolver = defaultResolver.get_contents_debug_adapter_protocol(obj, fmt=fmt) + if from_default_resolver: + ret = from_default_resolver + ret + ret.append((GENERATED_LEN_ATTR_NAME, len(obj), partial(_apply_evaluate_name, evaluate_name='len(%s)'))) + return ret + + def resolve(self, var, attribute): + if attribute in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR): + return None + + try: + attribute = int(attribute) + except: + return getattr(var, attribute) + + for v in var: + if id(v) == attribute: + return v + + raise UnableToResolveVariableException('Unable to resolve %s in %s' % (attribute, var)) + + def get_dictionary(self, var): + d = {} + for i, item in enumerate(var): + d[str(id(item))] = item + + if i > MAX_ITEMS_TO_HANDLE: + d[TOO_LARGE_ATTR] = TOO_LARGE_MSG + break + + # in case if the class extends built-in type and has some additional fields + additional_fields = defaultResolver.get_dictionary(var) + d.update(additional_fields) + d[GENERATED_LEN_ATTR_NAME] = len(var) + return d + + def change_var_from_name(self, container, name, new_value): + # The name given in this case must be the id(item), so, we can actually + # iterate in the set and see which item matches the given id. + + try: + # Check that the new value can actually be added to a set (i.e.: it's hashable/comparable). + set().add(new_value) + except: + return None + + for item in container: + if str(id(item)) == name: + container.remove(item) + container.add(new_value) + return str(id(new_value)) + + return None + + +#======================================================================================================================= +# InstanceResolver +#======================================================================================================================= +class InstanceResolver: + + def resolve(self, var, attribute): + field = var.__class__.getDeclaredField(attribute) + field.setAccessible(True) + return field.get(var) + + def get_dictionary(self, obj): + ret = {} + + declaredFields = obj.__class__.getDeclaredFields() + for i in range(len(declaredFields)): + name = declaredFields[i].getName() + try: + declaredFields[i].setAccessible(True) + ret[name] = declaredFields[i].get(obj) + except: + pydev_log.exception() + + return ret + + +#======================================================================================================================= +# JyArrayResolver +#======================================================================================================================= +class JyArrayResolver: + ''' + This resolves a regular Object[] array from java + ''' + + def resolve(self, var, attribute): + if attribute == GENERATED_LEN_ATTR_NAME: + return None + return var[int(attribute)] + + def get_dictionary(self, obj): + ret = {} + + for i in range(len(obj)): + ret[ i ] = obj[i] + + ret[GENERATED_LEN_ATTR_NAME] = len(obj) + return ret + + +#======================================================================================================================= +# MultiValueDictResolver +#======================================================================================================================= +class MultiValueDictResolver(DictResolver): + + def resolve(self, dct, key): + if key in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR): + return None + + # ok, we have to iterate over the items to find the one that matches the id, because that's the only way + # to actually find the reference from the string we have before. 
+ expected_id = int(key.split('(')[-1][:-1]) + for key in list(dct.keys()): + val = dct.getlist(key) + if id(key) == expected_id: + return val + + raise UnableToResolveVariableException() + + +#======================================================================================================================= +# DjangoFormResolver +#======================================================================================================================= +class DjangoFormResolver(DefaultResolver): + + def get_dictionary(self, var, names=None): + # Do not call self.errors because it is a property and has side effects. + names, used___dict__ = self.get_names(var) + + has_errors_attr = False + if "errors" in names: + has_errors_attr = True + names.remove("errors") + + d = defaultResolver.get_dictionary(var, names=names, used___dict__=used___dict__) + if has_errors_attr: + try: + errors_attr = getattr(var, "_errors") + except: + errors_attr = None + d["errors"] = errors_attr + return d + + +#======================================================================================================================= +# DequeResolver +#======================================================================================================================= +class DequeResolver(TupleResolver): + + def get_dictionary(self, var): + d = TupleResolver.get_dictionary(self, var) + d['maxlen'] = getattr(var, 'maxlen', None) + return d + + +#======================================================================================================================= +# OrderedDictResolver +#======================================================================================================================= +class OrderedDictResolver(DictResolver): + + sort_keys = False + + def init_dict(self): + return OrderedDict() + + +#======================================================================================================================= +# FrameResolver +#======================================================================================================================= +class FrameResolver: + ''' + This resolves a frame. 
+ ''' + + def resolve(self, obj, attribute): + if attribute == '__internals__': + return defaultResolver.get_dictionary(obj) + + if attribute == 'stack': + return self.get_frame_stack(obj) + + if attribute == 'f_locals': + return obj.f_locals + + return None + + def get_dictionary(self, obj): + ret = {} + ret['__internals__'] = defaultResolver.get_dictionary(obj) + ret['stack'] = self.get_frame_stack(obj) + ret['f_locals'] = obj.f_locals + return ret + + def get_frame_stack(self, frame): + ret = [] + if frame is not None: + ret.append(self.get_frame_name(frame)) + + while frame.f_back: + frame = frame.f_back + ret.append(self.get_frame_name(frame)) + + return ret + + def get_frame_name(self, frame): + if frame is None: + return 'None' + try: + name = basename(frame.f_code.co_filename) + return 'frame: %s [%s:%s] id:%s' % (frame.f_code.co_name, name, frame.f_lineno, id(frame)) + except: + return 'frame object' + + +defaultResolver = DefaultResolver() +dictResolver = DictResolver() +tupleResolver = TupleResolver() +instanceResolver = InstanceResolver() +jyArrayResolver = JyArrayResolver() +setResolver = SetResolver() +multiValueDictResolver = MultiValueDictResolver() +djangoFormResolver = DjangoFormResolver() +dequeResolver = DequeResolver() +orderedDictResolver = OrderedDictResolver() +frameResolver = FrameResolver() +dapGrouperResolver = DAPGrouperResolver() + + +class InspectStub: + + def isbuiltin(self, _args): + return False + + def isroutine(self, object): + return False + + +try: + import inspect +except: + inspect = InspectStub() + + +def get_var_scope(attr_name, attr_value, evaluate_name, handle_return_values): + if attr_name.startswith("'"): + if attr_name.endswith("'"): + attr_name = attr_name[1:-1] + else: + i = attr_name.find("__' (") + if i >= 0: + # Handle attr_name such as: >>'__name__' (1732494379184)<< + attr_name = attr_name[1: i + 2] + + if handle_return_values and attr_name == RETURN_VALUES_DICT: + return '' + + elif attr_name == GENERATED_LEN_ATTR_NAME: + return '' + + if attr_name.startswith('__') and attr_name.endswith('__'): + return DAPGrouper.SCOPE_SPECIAL_VARS + + if attr_name.startswith('_') or attr_name.endswith('__'): + return DAPGrouper.SCOPE_PROTECTED_VARS + + try: + if inspect.isroutine(attr_value) or isinstance(attr_value, MethodWrapperType): + return DAPGrouper.SCOPE_FUNCTION_VARS + + elif inspect.isclass(attr_value): + return DAPGrouper.SCOPE_CLASS_VARS + except: + # It's possible that isinstance throws an exception when dealing with user-code. + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 0: + pydev_log.exception() + + return '' diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py new file mode 100644 index 0000000..09b713f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py @@ -0,0 +1,353 @@ +""" +Vendored copy of runpy from the standard library. + +It's vendored so that we can properly ignore it when used to start user code +while still making it possible for the user to debug runpy itself. + +runpy.py - locating and running Python code using the module namespace + +Provides support for locating and running Python scripts using the Python +module namespace instead of the native filesystem. + +This allows Python code to play nicely with non-filesystem based PEP 302 +importers when locating support scripts as well as when importing modules. 
+""" +# Written by Nick Coghlan +# to implement PEP 338 (Executing Modules as Scripts) + +import sys +import importlib.machinery # importlib first so we can test #15386 via -m +import importlib.util +import io +import types +import os + +__all__ = [ + "run_module", "run_path", +] + + +# Note: fabioz: Don't use pkgutil (when handling caught exceptions we could end up +# showing exceptions in pkgutil.get_imported (specifically the KeyError), so, +# create a copy of the function we need to properly ignore this exception when +# running the program. +def pkgutil_get_importer(path_item): + """Retrieve a finder for the given path item + + The returned finder is cached in sys.path_importer_cache + if it was newly created by a path hook. + + The cache (or part of it) can be cleared manually if a + rescan of sys.path_hooks is necessary. + """ + try: + importer = sys.path_importer_cache[path_item] + except KeyError: + for path_hook in sys.path_hooks: + try: + importer = path_hook(path_item) + sys.path_importer_cache.setdefault(path_item, importer) + break + except ImportError: + pass + else: + importer = None + return importer + + +class _TempModule(object): + """Temporarily replace a module in sys.modules with an empty namespace""" + + def __init__(self, mod_name): + self.mod_name = mod_name + self.module = types.ModuleType(mod_name) + self._saved_module = [] + + def __enter__(self): + mod_name = self.mod_name + try: + self._saved_module.append(sys.modules[mod_name]) + except KeyError: + pass + sys.modules[mod_name] = self.module + return self + + def __exit__(self, *args): + if self._saved_module: + sys.modules[self.mod_name] = self._saved_module[0] + else: + del sys.modules[self.mod_name] + self._saved_module = [] + + +class _ModifiedArgv0(object): + + def __init__(self, value): + self.value = value + self._saved_value = self._sentinel = object() + + def __enter__(self): + if self._saved_value is not self._sentinel: + raise RuntimeError("Already preserving saved value") + self._saved_value = sys.argv[0] + sys.argv[0] = self.value + + def __exit__(self, *args): + self.value = self._sentinel + sys.argv[0] = self._saved_value + + +# TODO: Replace these helpers with importlib._bootstrap_external functions. 
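# Illustrative usage sketch (module and path names below are hypothetical): the vendored
# copy keeps the standard-library runpy API, so launching user code looks like this.
from _pydevd_bundle import pydevd_runpy

def _example_launch():
    # Run a module the same way `python -m mypkg.tool` would ('mypkg.tool' is hypothetical).
    mod_globals = pydevd_runpy.run_module('mypkg.tool', run_name='__main__', alter_sys=True)
    # Or execute a script/zipfile/directory by filesystem path (hypothetical path).
    path_globals = pydevd_runpy.run_path('/tmp/some_script.py', run_name='__main__')
    return mod_globals, path_globals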
+def _run_code(code, run_globals, init_globals=None, + mod_name=None, mod_spec=None, + pkg_name=None, script_name=None): + """Helper to run code in nominated namespace""" + if init_globals is not None: + run_globals.update(init_globals) + if mod_spec is None: + loader = None + fname = script_name + cached = None + else: + loader = mod_spec.loader + fname = mod_spec.origin + cached = mod_spec.cached + if pkg_name is None: + pkg_name = mod_spec.parent + run_globals.update(__name__=mod_name, + __file__=fname, + __cached__=cached, + __doc__=None, + __loader__=loader, + __package__=pkg_name, + __spec__=mod_spec) + exec(code, run_globals) + return run_globals + + +def _run_module_code(code, init_globals=None, + mod_name=None, mod_spec=None, + pkg_name=None, script_name=None): + """Helper to run code in new namespace with sys modified""" + fname = script_name if mod_spec is None else mod_spec.origin + with _TempModule(mod_name) as temp_module, _ModifiedArgv0(fname): + mod_globals = temp_module.module.__dict__ + _run_code(code, mod_globals, init_globals, + mod_name, mod_spec, pkg_name, script_name) + # Copy the globals of the temporary module, as they + # may be cleared when the temporary module goes away + return mod_globals.copy() + + +# Helper to get the full name, spec and code for a module +def _get_module_details(mod_name, error=ImportError): + if mod_name.startswith("."): + raise error("Relative module names not supported") + pkg_name, _, _ = mod_name.rpartition(".") + if pkg_name: + # Try importing the parent to avoid catching initialization errors + try: + __import__(pkg_name) + except ImportError as e: + # If the parent or higher ancestor package is missing, let the + # error be raised by find_spec() below and then be caught. But do + # not allow other errors to be caught. + if e.name is None or (e.name != pkg_name and + not pkg_name.startswith(e.name + ".")): + raise + # Warn if the module has already been imported under its normal name + existing = sys.modules.get(mod_name) + if existing is not None and not hasattr(existing, "__path__"): + from warnings import warn + msg = "{mod_name!r} found in sys.modules after import of " \ + "package {pkg_name!r}, but prior to execution of " \ + "{mod_name!r}; this may result in unpredictable " \ + "behaviour".format(mod_name=mod_name, pkg_name=pkg_name) + warn(RuntimeWarning(msg)) + + try: + spec = importlib.util.find_spec(mod_name) + except (ImportError, AttributeError, TypeError, ValueError) as ex: + # This hack fixes an impedance mismatch between pkgutil and + # importlib, where the latter raises other errors for cases where + # pkgutil previously raised ImportError + msg = "Error while finding module specification for {!r} ({}: {})" + if mod_name.endswith(".py"): + msg += (f". 
Try using '{mod_name[:-3]}' instead of " + f"'{mod_name}' as the module name.") + raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex + if spec is None: + raise error("No module named %s" % mod_name) + if spec.submodule_search_locations is not None: + if mod_name == "__main__" or mod_name.endswith(".__main__"): + raise error("Cannot use package as __main__ module") + try: + pkg_main_name = mod_name + ".__main__" + return _get_module_details(pkg_main_name, error) + except error as e: + if mod_name not in sys.modules: + raise # No module loaded; being a package is irrelevant + raise error(("%s; %r is a package and cannot " + + "be directly executed") % (e, mod_name)) + loader = spec.loader + if loader is None: + raise error("%r is a namespace package and cannot be executed" + % mod_name) + try: + code = loader.get_code(mod_name) + except ImportError as e: + raise error(format(e)) from e + if code is None: + raise error("No code object available for %s" % mod_name) + return mod_name, spec, code + + +class _Error(Exception): + """Error that _run_module_as_main() should report without a traceback""" + + +# XXX ncoghlan: Should this be documented and made public? +# (Current thoughts: don't repeat the mistake that lead to its +# creation when run_module() no longer met the needs of +# mainmodule.c, but couldn't be changed because it was public) +def _run_module_as_main(mod_name, alter_argv=True): + """Runs the designated module in the __main__ namespace + + Note that the executed module will have full access to the + __main__ namespace. If this is not desirable, the run_module() + function should be used to run the module code in a fresh namespace. + + At the very least, these variables in __main__ will be overwritten: + __name__ + __file__ + __cached__ + __loader__ + __package__ + """ + try: + if alter_argv or mod_name != "__main__": # i.e. -m switch + mod_name, mod_spec, code = _get_module_details(mod_name, _Error) + else: # i.e. 
directory or zipfile execution + mod_name, mod_spec, code = _get_main_module_details(_Error) + except _Error as exc: + msg = "%s: %s" % (sys.executable, exc) + sys.exit(msg) + main_globals = sys.modules["__main__"].__dict__ + if alter_argv: + sys.argv[0] = mod_spec.origin + return _run_code(code, main_globals, None, + "__main__", mod_spec) + + +def run_module(mod_name, init_globals=None, + run_name=None, alter_sys=False): + """Execute a module's code without importing it + + Returns the resulting top level namespace dictionary + """ + mod_name, mod_spec, code = _get_module_details(mod_name) + if run_name is None: + run_name = mod_name + if alter_sys: + return _run_module_code(code, init_globals, run_name, mod_spec) + else: + # Leave the sys module alone + return _run_code(code, {}, init_globals, run_name, mod_spec) + + +def _get_main_module_details(error=ImportError): + # Helper that gives a nicer error message when attempting to + # execute a zipfile or directory by invoking __main__.py + # Also moves the standard __main__ out of the way so that the + # preexisting __loader__ entry doesn't cause issues + main_name = "__main__" + saved_main = sys.modules[main_name] + del sys.modules[main_name] + try: + return _get_module_details(main_name) + except ImportError as exc: + if main_name in str(exc): + raise error("can't find %r module in %r" % + (main_name, sys.path[0])) from exc + raise + finally: + sys.modules[main_name] = saved_main + + +try: + io_open_code = io.open_code +except AttributeError: + # Compatibility with Python 3.6/3.7 + import tokenize + io_open_code = tokenize.open + + +def _get_code_from_file(run_name, fname): + # Check for a compiled file first + from pkgutil import read_code + decoded_path = os.path.abspath(os.fsdecode(fname)) + with io_open_code(decoded_path) as f: + code = read_code(f) + if code is None: + # That didn't work, so try it as normal source code + with io_open_code(decoded_path) as f: + code = compile(f.read(), fname, 'exec') + return code, fname + + +def run_path(path_name, init_globals=None, run_name=None): + """Execute code located at the specified filesystem location + + Returns the resulting top level namespace dictionary + + The file path may refer directly to a Python script (i.e. + one that could be directly executed with execfile) or else + it may refer to a zipfile or directory containing a top + level __main__.py script. + """ + if run_name is None: + run_name = "" + pkg_name = run_name.rpartition(".")[0] + importer = pkgutil_get_importer(path_name) + # Trying to avoid importing imp so as to not consume the deprecation warning. + is_NullImporter = False + if type(importer).__module__ == 'imp': + if type(importer).__name__ == 'NullImporter': + is_NullImporter = True + if isinstance(importer, type(None)) or is_NullImporter: + # Not a valid sys.path entry, so run the code directly + # execfile() doesn't help as we want to allow compiled files + code, fname = _get_code_from_file(run_name, path_name) + return _run_module_code(code, init_globals, run_name, + pkg_name=pkg_name, script_name=fname) + else: + # Finder is defined for path, so add it to + # the start of sys.path + sys.path.insert(0, path_name) + try: + # Here's where things are a little different from the run_module + # case. There, we only had to replace the module in sys while the + # code was running and doing so was somewhat optional. Here, we + # have no choice and we have to remove it even while we read the + # code. 
If we don't do this, a __loader__ attribute in the + # existing __main__ module may prevent location of the new module. + mod_name, mod_spec, code = _get_main_module_details() + with _TempModule(run_name) as temp_module, \ + _ModifiedArgv0(path_name): + mod_globals = temp_module.module.__dict__ + return _run_code(code, mod_globals, init_globals, + run_name, mod_spec, pkg_name).copy() + finally: + try: + sys.path.remove(path_name) + except ValueError: + pass + + +if __name__ == "__main__": + # Run the module specified as the next command line argument + if len(sys.argv) < 2: + print("No module specified for execution", file=sys.stderr) + else: + del sys.argv[0] # Make the requested module sys.argv[0] + _run_module_as_main(sys.argv[0]) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_safe_repr.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_safe_repr.py new file mode 100644 index 0000000..f1b64b7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_safe_repr.py @@ -0,0 +1,399 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +# Gotten from ptvsd for supporting the format expected there. +import sys +from _pydevd_bundle.pydevd_constants import IS_PY36_OR_GREATER +import locale +from _pydev_bundle import pydev_log + + +class SafeRepr(object): + # Can be used to override the encoding from locale.getpreferredencoding() + locale_preferred_encoding = None + + # Can be used to override the encoding used for sys.stdout.encoding + sys_stdout_encoding = None + + # String types are truncated to maxstring_outer when at the outer- + # most level, and truncated to maxstring_inner characters inside + # collections. + maxstring_outer = 2 ** 16 + maxstring_inner = 30 + string_types = (str, bytes) + bytes = bytes + set_info = (set, '{', '}', False) + frozenset_info = (frozenset, 'frozenset({', '})', False) + int_types = (int,) + long_iter_types = (list, tuple, bytearray, range, + dict, set, frozenset) + + # Collection types are recursively iterated for each limit in + # maxcollection. + maxcollection = (15, 10) + + # Specifies type, prefix string, suffix string, and whether to include a + # comma if there is only one element. (Using a sequence rather than a + # mapping because we use isinstance() to determine the matching type.) + collection_types = [ + (tuple, '(', ')', True), + (list, '[', ']', False), + frozenset_info, + set_info, + ] + try: + from collections import deque + collection_types.append((deque, 'deque([', '])', False)) + except Exception: + pass + + # type, prefix string, suffix string, item prefix string, + # item key/value separator, item suffix string + dict_types = [(dict, '{', '}', '', ': ', '')] + try: + from collections import OrderedDict + dict_types.append((OrderedDict, 'OrderedDict([', '])', '(', ', ', ')')) + except Exception: + pass + + # All other types are treated identically to strings, but using + # different limits. + maxother_outer = 2 ** 16 + maxother_inner = 30 + + convert_to_hex = False + raw_value = False + + def __call__(self, obj): + ''' + :param object obj: + The object for which we want a representation. + + :return str: + Returns bytes encoded as utf-8 on py2 and str on py3. 
+ ''' + try: + return ''.join(self._repr(obj, 0)) + except Exception: + try: + return 'An exception was raised: %r' % sys.exc_info()[1] + except Exception: + return 'An exception was raised' + + def _repr(self, obj, level): + '''Returns an iterable of the parts in the final repr string.''' + + try: + obj_repr = type(obj).__repr__ + except Exception: + obj_repr = None + + def has_obj_repr(t): + r = t.__repr__ + try: + return obj_repr == r + except Exception: + return obj_repr is r + + for t, prefix, suffix, comma in self.collection_types: + if isinstance(obj, t) and has_obj_repr(t): + return self._repr_iter(obj, level, prefix, suffix, comma) + + for t, prefix, suffix, item_prefix, item_sep, item_suffix in self.dict_types: # noqa + if isinstance(obj, t) and has_obj_repr(t): + return self._repr_dict(obj, level, prefix, suffix, + item_prefix, item_sep, item_suffix) + + for t in self.string_types: + if isinstance(obj, t) and has_obj_repr(t): + return self._repr_str(obj, level) + + if self._is_long_iter(obj): + return self._repr_long_iter(obj) + + return self._repr_other(obj, level) + + # Determines whether an iterable exceeds the limits set in + # maxlimits, and is therefore unsafe to repr(). + def _is_long_iter(self, obj, level=0): + try: + # Strings have their own limits (and do not nest). Because + # they don't have __iter__ in 2.x, this check goes before + # the next one. + if isinstance(obj, self.string_types): + return len(obj) > self.maxstring_inner + + # If it's not an iterable (and not a string), it's fine. + if not hasattr(obj, '__iter__'): + return False + + # If it's not an instance of these collection types then it + # is fine. Note: this is a fix for + # https://github.com/Microsoft/ptvsd/issues/406 + if not isinstance(obj, self.long_iter_types): + return False + + # Iterable is its own iterator - this is a one-off iterable + # like generator or enumerate(). We can't really count that, + # but repr() for these should not include any elements anyway, + # so we can treat it the same as non-iterables. + if obj is iter(obj): + return False + + # range reprs fine regardless of length. + if isinstance(obj, range): + return False + + # numpy and scipy collections (ndarray etc) have + # self-truncating repr, so they're always safe. + try: + module = type(obj).__module__.partition('.')[0] + if module in ('numpy', 'scipy'): + return False + except Exception: + pass + + # Iterables that nest too deep are considered long. + if level >= len(self.maxcollection): + return True + + # It is too long if the length exceeds the limit, or any + # of its elements are long iterables. + if hasattr(obj, '__len__'): + try: + size = len(obj) + except Exception: + size = None + if size is not None and size > self.maxcollection[level]: + return True + return any((self._is_long_iter(item, level + 1) for item in obj)) # noqa + return any(i > self.maxcollection[level] or self._is_long_iter(item, level + 1) for i, item in enumerate(obj)) # noqa + + except Exception: + # If anything breaks, assume the worst case. + return True + + def _repr_iter(self, obj, level, prefix, suffix, + comma_after_single_element=False): + yield prefix + + if level >= len(self.maxcollection): + yield '...' + else: + count = self.maxcollection[level] + yield_comma = False + for item in obj: + if yield_comma: + yield ', ' + yield_comma = True + + count -= 1 + if count <= 0: + yield '...' 
+ break + + for p in self._repr(item, 100 if item is obj else level + 1): + yield p + else: + if comma_after_single_element: + if count == self.maxcollection[level] - 1: + yield ',' + yield suffix + + def _repr_long_iter(self, obj): + try: + length = hex(len(obj)) if self.convert_to_hex else len(obj) + obj_repr = '<%s, len() = %s>' % (type(obj).__name__, length) + except Exception: + try: + obj_repr = '<' + type(obj).__name__ + '>' + except Exception: + obj_repr = '' + yield obj_repr + + def _repr_dict(self, obj, level, prefix, suffix, + item_prefix, item_sep, item_suffix): + if not obj: + yield prefix + suffix + return + if level >= len(self.maxcollection): + yield prefix + '...' + suffix + return + + yield prefix + + count = self.maxcollection[level] + yield_comma = False + + if IS_PY36_OR_GREATER: + # On Python 3.6 (onwards) dictionaries now keep + # insertion order. + sorted_keys = list(obj) + else: + try: + sorted_keys = sorted(obj) + except Exception: + sorted_keys = list(obj) + + for key in sorted_keys: + if yield_comma: + yield ', ' + yield_comma = True + + count -= 1 + if count <= 0: + yield '...' + break + + yield item_prefix + for p in self._repr(key, level + 1): + yield p + + yield item_sep + + try: + item = obj[key] + except Exception: + yield '' + else: + for p in self._repr(item, 100 if item is obj else level + 1): + yield p + yield item_suffix + + yield suffix + + def _repr_str(self, obj, level): + try: + if self.raw_value: + # For raw value retrieval, ignore all limits. + if isinstance(obj, bytes): + yield obj.decode('latin-1') + else: + yield obj + return + + limit_inner = self.maxother_inner + limit_outer = self.maxother_outer + limit = limit_inner if level > 0 else limit_outer + if len(obj) <= limit: + # Note that we check the limit before doing the repr (so, the final string + # may actually be considerably bigger on some cases, as besides + # the additional u, b, ' chars, some chars may be escaped in repr, so + # even a single char such as \U0010ffff may end up adding more + # chars than expected). + yield self._convert_to_unicode_or_bytes_repr(repr(obj)) + return + + # Slightly imprecise calculations - we may end up with a string that is + # up to 6 characters longer than limit. If you need precise formatting, + # you are using the wrong class. + left_count, right_count = max(1, int(2 * limit / 3)), max(1, int(limit / 3)) # noqa + + # Important: only do repr after slicing to avoid duplicating a byte array that could be + # huge. + + # Note: we don't deal with high surrogates here because we're not dealing with the + # repr() of a random object. + # i.e.: A high surrogate unicode char may be splitted on Py2, but as we do a `repr` + # afterwards, that's ok. + + # Also, we just show the unicode/string/bytes repr() directly to make clear what the + # input type was (so, on py2 a unicode would start with u' and on py3 a bytes would + # start with b'). + + part1 = obj[:left_count] + part1 = repr(part1) + part1 = part1[:part1.rindex("'")] # Remove the last ' + + part2 = obj[-right_count:] + part2 = repr(part2) + part2 = part2[part2.index("'") + 1:] # Remove the first ' (and possibly u or b). + + yield part1 + yield '...' + yield part2 + except: + # This shouldn't really happen, but let's play it safe. 
+ pydev_log.exception('Error getting string representation to show.') + for part in self._repr_obj(obj, level, + self.maxother_inner, self.maxother_outer): + yield part + + def _repr_other(self, obj, level): + return self._repr_obj(obj, level, + self.maxother_inner, self.maxother_outer) + + def _repr_obj(self, obj, level, limit_inner, limit_outer): + try: + if self.raw_value: + # For raw value retrieval, ignore all limits. + if isinstance(obj, bytes): + yield obj.decode('latin-1') + return + + try: + mv = memoryview(obj) + except Exception: + yield self._convert_to_unicode_or_bytes_repr(repr(obj)) + return + else: + # Map bytes to Unicode codepoints with same values. + yield mv.tobytes().decode('latin-1') + return + elif self.convert_to_hex and isinstance(obj, self.int_types): + obj_repr = hex(obj) + else: + obj_repr = repr(obj) + except Exception: + try: + obj_repr = object.__repr__(obj) + except Exception: + try: + obj_repr = '' # noqa + except Exception: + obj_repr = '' + + limit = limit_inner if level > 0 else limit_outer + + if limit >= len(obj_repr): + yield self._convert_to_unicode_or_bytes_repr(obj_repr) + return + + # Slightly imprecise calculations - we may end up with a string that is + # up to 3 characters longer than limit. If you need precise formatting, + # you are using the wrong class. + left_count, right_count = max(1, int(2 * limit / 3)), max(1, int(limit / 3)) # noqa + + yield obj_repr[:left_count] + yield '...' + yield obj_repr[-right_count:] + + def _convert_to_unicode_or_bytes_repr(self, obj_repr): + return obj_repr + + def _bytes_as_unicode_if_possible(self, obj_repr): + # We try to decode with 3 possible encoding (sys.stdout.encoding, + # locale.getpreferredencoding() and 'utf-8). If no encoding can decode + # the input, we return the original bytes. + try_encodings = [] + encoding = self.sys_stdout_encoding or getattr(sys.stdout, 'encoding', '') + if encoding: + try_encodings.append(encoding.lower()) + + preferred_encoding = self.locale_preferred_encoding or locale.getpreferredencoding() + if preferred_encoding: + preferred_encoding = preferred_encoding.lower() + if preferred_encoding not in try_encodings: + try_encodings.append(preferred_encoding) + + if 'utf-8' not in try_encodings: + try_encodings.append('utf-8') + + for encoding in try_encodings: + try: + return obj_repr.decode(encoding) + except UnicodeDecodeError: + pass + + return obj_repr # Return the original version (in bytes) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_save_locals.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_save_locals.py new file mode 100644 index 0000000..fa1a125 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_save_locals.py @@ -0,0 +1,96 @@ +""" +Utility for saving locals. +""" +import sys + +try: + import types + + frame_type = types.FrameType +except: + frame_type = type(sys._getframe()) + + +def is_save_locals_available(): + return save_locals_impl is not None + + +def save_locals(frame): + """ + Copy values from locals_dict into the fast stack slots in the given frame. + + Note: the 'save_locals' branch had a different approach wrapping the frame (much more code, but it gives ideas + on how to save things partially, not the 'whole' locals). 
+ """ + if not isinstance(frame, frame_type): + # Fix exception when changing Django variable (receiving DjangoTemplateFrame) + return + + if save_locals_impl is not None: + try: + save_locals_impl(frame) + except: + pass + + +def make_save_locals_impl(): + """ + Factory for the 'save_locals_impl' method. This may seem like a complicated pattern but it is essential that the method is created at + module load time. Inner imports after module load time would cause an occasional debugger deadlock due to the importer lock and debugger + lock being taken in different order in different threads. + """ + try: + if '__pypy__' in sys.builtin_module_names: + import __pypy__ # @UnresolvedImport + save_locals = __pypy__.locals_to_fast + except: + pass + else: + if '__pypy__' in sys.builtin_module_names: + + def save_locals_pypy_impl(frame): + save_locals(frame) + + return save_locals_pypy_impl + + try: + import ctypes + locals_to_fast = ctypes.pythonapi.PyFrame_LocalsToFast + except: + pass + else: + + def save_locals_ctypes_impl(frame): + locals_to_fast(ctypes.py_object(frame), ctypes.c_int(0)) + + return save_locals_ctypes_impl + + return None + + +save_locals_impl = make_save_locals_impl() + + +def update_globals_and_locals(updated_globals, initial_globals, frame): + # We don't have the locals and passed all in globals, so, we have to + # manually choose how to update the variables. + # + # Note that the current implementation is a bit tricky: it does work in general + # but if we do something as 'some_var = 10' and 'some_var' is already defined to have + # the value '10' in the globals, we won't actually put that value in the locals + # (which means that the frame locals won't be updated). + # Still, the approach to have a single namespace was chosen because it was the only + # one that enabled creating and using variables during the same evaluation. + assert updated_globals is not None + f_locals = None + for key, val in updated_globals.items(): + if initial_globals.get(key) is not val: + if f_locals is None: + # Note: we call f_locals only once because each time + # we call it the values may be reset. 
+ f_locals = frame.f_locals + + f_locals[key] = val + + if f_locals is not None: + save_locals(frame) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_signature.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_signature.py new file mode 100644 index 0000000..3877e62 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_signature.py @@ -0,0 +1,201 @@ +from _pydev_bundle import pydev_log + +try: + import trace +except ImportError: + pass +else: + trace._warn = lambda *args: None # workaround for http://bugs.python.org/issue17143 (PY-8706) + +import os +from _pydevd_bundle.pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle.pydevd_utils import get_clsname_for_code + + +class Signature(object): + + def __init__(self, file, name): + self.file = file + self.name = name + self.args = [] + self.args_str = [] + self.return_type = None + + def add_arg(self, name, type): + self.args.append((name, type)) + self.args_str.append("%s:%s" % (name, type)) + + def set_args(self, frame, recursive=False): + self.args = [] + + code = frame.f_code + locals = frame.f_locals + + for i in range(0, code.co_argcount): + name = code.co_varnames[i] + class_name = get_type_of_value(locals[name], recursive=recursive) + + self.add_arg(name, class_name) + + def __str__(self): + return "%s %s(%s)" % (self.file, self.name, ", ".join(self.args_str)) + + +def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'builtins'), recursive=False): + tp = type(value) + class_name = tp.__name__ + if class_name == 'instance': # old-style classes + tp = value.__class__ + class_name = tp.__name__ + + if hasattr(tp, '__module__') and tp.__module__ and tp.__module__ not in ignore_module_name: + class_name = "%s.%s" % (tp.__module__, class_name) + + if class_name == 'list': + class_name = 'List' + if len(value) > 0 and recursive: + class_name += '[%s]' % get_type_of_value(value[0], recursive=recursive) + return class_name + + if class_name == 'dict': + class_name = 'Dict' + if len(value) > 0 and recursive: + for (k, v) in value.items(): + class_name += '[%s, %s]' % (get_type_of_value(k, recursive=recursive), + get_type_of_value(v, recursive=recursive)) + break + return class_name + + if class_name == 'tuple': + class_name = 'Tuple' + if len(value) > 0 and recursive: + class_name += '[' + class_name += ', '.join(get_type_of_value(v, recursive=recursive) for v in value) + class_name += ']' + + return class_name + + +def _modname(path): + """Return a plausible module name for the path""" + base = os.path.basename(path) + filename, ext = os.path.splitext(base) + return filename + + +class SignatureFactory(object): + + def __init__(self): + self._caller_cache = {} + self.cache = CallSignatureCache() + + def create_signature(self, frame, filename, with_args=True): + try: + _, modulename, funcname = self.file_module_function_of(frame) + signature = Signature(filename, funcname) + if with_args: + signature.set_args(frame, recursive=True) + return signature + except: + pydev_log.exception() + + def file_module_function_of(self, frame): # this code is take from trace module and fixed to work with new-style classes + code = frame.f_code + filename = code.co_filename + if filename: + modulename = _modname(filename) + else: + modulename = None + + funcname = code.co_name + clsname = None + if code in self._caller_cache: + if 
self._caller_cache[code] is not None: + clsname = self._caller_cache[code] + else: + self._caller_cache[code] = None + clsname = get_clsname_for_code(code, frame) + if clsname is not None: + # cache the result - assumption is that new.* is + # not called later to disturb this relationship + # _caller_cache could be flushed if functions in + # the new module get called. + self._caller_cache[code] = clsname + + if clsname is not None: + funcname = "%s.%s" % (clsname, funcname) + + return filename, modulename, funcname + + +def get_signature_info(signature): + return signature.file, signature.name, ' '.join([arg[1] for arg in signature.args]) + + +def get_frame_info(frame): + co = frame.f_code + return co.co_name, frame.f_lineno, co.co_filename + + +class CallSignatureCache(object): + + def __init__(self): + self.cache = {} + + def add(self, signature): + filename, name, args_type = get_signature_info(signature) + calls_from_file = self.cache.setdefault(filename, {}) + name_calls = calls_from_file.setdefault(name, {}) + name_calls[args_type] = None + + def is_in_cache(self, signature): + filename, name, args_type = get_signature_info(signature) + if args_type in self.cache.get(filename, {}).get(name, {}): + return True + return False + + +def create_signature_message(signature): + cmdTextList = [""] + + cmdTextList.append('' % (pydevd_xml.make_valid_xml_value(signature.file), pydevd_xml.make_valid_xml_value(signature.name))) + + for arg in signature.args: + cmdTextList.append('' % (pydevd_xml.make_valid_xml_value(arg[0]), pydevd_xml.make_valid_xml_value(arg[1]))) + + if signature.return_type is not None: + cmdTextList.append('' % (pydevd_xml.make_valid_xml_value(signature.return_type))) + + cmdTextList.append("") + cmdText = ''.join(cmdTextList) + return NetCommand(CMD_SIGNATURE_CALL_TRACE, 0, cmdText) + + +def send_signature_call_trace(dbg, frame, filename): + if dbg.signature_factory and dbg.in_project_scope(frame): + signature = dbg.signature_factory.create_signature(frame, filename) + if signature is not None: + if dbg.signature_factory.cache is not None: + if not dbg.signature_factory.cache.is_in_cache(signature): + dbg.signature_factory.cache.add(signature) + dbg.writer.add_command(create_signature_message(signature)) + return True + else: + # we don't send signature if it is cached + return False + else: + dbg.writer.add_command(create_signature_message(signature)) + return True + return False + + +def send_signature_return_trace(dbg, frame, filename, return_value): + if dbg.signature_factory and dbg.in_project_scope(frame): + signature = dbg.signature_factory.create_signature(frame, filename, with_args=False) + signature.return_type = get_type_of_value(return_value, recursive=True) + dbg.writer.add_command(create_signature_message(signature)) + return True + + return False + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_source_mapping.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_source_mapping.py new file mode 100644 index 0000000..5455307 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_source_mapping.py @@ -0,0 +1,153 @@ +import bisect +from _pydevd_bundle.pydevd_constants import NULL, KeyifyList +import pydevd_file_utils + + +class SourceMappingEntry(object): + + __slots__ = ['source_filename', 'line', 'end_line', 'runtime_line', 'runtime_source'] + + def __init__(self, line, end_line, runtime_line, runtime_source): + assert 
isinstance(runtime_source, str) + + self.line = int(line) + self.end_line = int(end_line) + self.runtime_line = int(runtime_line) + self.runtime_source = runtime_source # Something as + + # Should be set after translated to server (absolute_source_filename). + # This is what's sent to the client afterwards (so, its case should not be normalized). + self.source_filename = None + + def contains_line(self, i): + return self.line <= i <= self.end_line + + def contains_runtime_line(self, i): + line_count = self.end_line + self.line + runtime_end_line = self.runtime_line + line_count + return self.runtime_line <= i <= runtime_end_line + + def __str__(self): + return 'SourceMappingEntry(%s)' % ( + ', '.join('%s=%r' % (attr, getattr(self, attr)) for attr in self.__slots__)) + + __repr__ = __str__ + + +class SourceMapping(object): + + def __init__(self, on_source_mapping_changed=NULL): + self._mappings_to_server = {} # dict(normalized(file.py) to [SourceMappingEntry]) + self._mappings_to_client = {} # dict( to File.py) + self._cache = {} + self._on_source_mapping_changed = on_source_mapping_changed + + def set_source_mapping(self, absolute_filename, mapping): + ''' + :param str absolute_filename: + The filename for the source mapping (bytes on py2 and str on py3). + + :param list(SourceMappingEntry) mapping: + A list with the source mapping entries to be applied to the given filename. + + :return str: + An error message if it was not possible to set the mapping or an empty string if + everything is ok. + ''' + # Let's first validate if it's ok to apply that mapping. + # File mappings must be 1:N, not M:N (i.e.: if there's a mapping from file1.py to , + # there can be no other mapping from any other file to ). + # This is a limitation to make it easier to remove existing breakpoints when new breakpoints are + # set to a file (so, any file matching that breakpoint can be removed instead of needing to check + # which lines are corresponding to that file). + for map_entry in mapping: + existing_source_filename = self._mappings_to_client.get(map_entry.runtime_source) + if existing_source_filename and existing_source_filename != absolute_filename: + return 'Cannot apply mapping from %s to %s (it conflicts with mapping: %s to %s)' % ( + absolute_filename, map_entry.runtime_source, existing_source_filename, map_entry.runtime_source) + + try: + absolute_normalized_filename = pydevd_file_utils.normcase(absolute_filename) + current_mapping = self._mappings_to_server.get(absolute_normalized_filename, []) + for map_entry in current_mapping: + del self._mappings_to_client[map_entry.runtime_source] + + self._mappings_to_server[absolute_normalized_filename] = sorted(mapping, key=lambda entry:entry.line) + + for map_entry in mapping: + self._mappings_to_client[map_entry.runtime_source] = absolute_filename + finally: + self._cache.clear() + self._on_source_mapping_changed() + return '' + + def map_to_client(self, runtime_source_filename, lineno): + key = (lineno, 'client', runtime_source_filename) + try: + return self._cache[key] + except KeyError: + for _, mapping in list(self._mappings_to_server.items()): + for map_entry in mapping: + if map_entry.runtime_source == runtime_source_filename: # + if map_entry.contains_runtime_line(lineno): # matches line range + self._cache[key] = (map_entry.source_filename, map_entry.line + (lineno - map_entry.runtime_line), True) + return self._cache[key] + + self._cache[key] = (runtime_source_filename, lineno, False) # Mark that no translation happened in the cache. 
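# Worked example of the line translation above (hypothetical numbers): a mapping entry
# covering source lines 10..20 whose generated runtime code starts at runtime line 2.
entry_line, entry_runtime_line = 10, 2
runtime_lineno = 5
client_lineno = entry_line + (runtime_lineno - entry_runtime_line)          # 13, as in map_to_client
assert entry_runtime_line + (client_lineno - entry_line) == runtime_lineno  # map_to_server inverts it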
+ return self._cache[key] + + def has_mapping_entry(self, runtime_source_filename): + ''' + :param runtime_source_filename: + Something as + ''' + # Note that we're not interested in the line here, just on knowing if a given filename + # (from the server) has a mapping for it. + key = ('has_entry', runtime_source_filename) + try: + return self._cache[key] + except KeyError: + for _absolute_normalized_filename, mapping in list(self._mappings_to_server.items()): + for map_entry in mapping: + if map_entry.runtime_source == runtime_source_filename: + self._cache[key] = True + return self._cache[key] + + self._cache[key] = False + return self._cache[key] + + def map_to_server(self, absolute_filename, lineno): + ''' + Convert something as 'file1.py' at line 10 to '' at line 2. + + Note that the name should be already normalized at this point. + ''' + absolute_normalized_filename = pydevd_file_utils.normcase(absolute_filename) + + changed = False + mappings = self._mappings_to_server.get(absolute_normalized_filename) + if mappings: + + i = bisect.bisect(KeyifyList(mappings, lambda entry:entry.line), lineno) + if i >= len(mappings): + i -= 1 + + if i == 0: + entry = mappings[i] + + else: + entry = mappings[i - 1] + + if not entry.contains_line(lineno): + entry = mappings[i] + if not entry.contains_line(lineno): + entry = None + + if entry is not None: + lineno = entry.runtime_line + (lineno - entry.line) + + absolute_filename = entry.runtime_source + changed = True + + return absolute_filename, lineno, changed + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_stackless.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_stackless.py new file mode 100644 index 0000000..44bb768 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_stackless.py @@ -0,0 +1,416 @@ +from __future__ import nested_scopes + +import weakref +import sys + +from _pydevd_bundle.pydevd_comm import get_global_debugger +from _pydevd_bundle.pydevd_constants import call_only_once +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_custom_frames import update_custom_frame, remove_custom_frame, add_custom_frame +import stackless # @UnresolvedImport +from _pydev_bundle import pydev_log + + +# Used so that we don't loose the id (because we'll remove when it's not alive and would generate a new id for the +# same tasklet). +class TaskletToLastId: + ''' + So, why not a WeakKeyDictionary? + The problem is that removals from the WeakKeyDictionary will create a new tasklet (as it adds a callback to + remove the key when it's garbage-collected), so, we can get into a recursion. + ''' + + def __init__(self): + self.tasklet_ref_to_last_id = {} + self._i = 0 + + def get(self, tasklet): + return self.tasklet_ref_to_last_id.get(weakref.ref(tasklet)) + + def __setitem__(self, tasklet, last_id): + self.tasklet_ref_to_last_id[weakref.ref(tasklet)] = last_id + self._i += 1 + if self._i % 100 == 0: # Collect at each 100 additions to the dict (no need to rush). 
+ for tasklet_ref in list(self.tasklet_ref_to_last_id.keys()): + if tasklet_ref() is None: + del self.tasklet_ref_to_last_id[tasklet_ref] + + +_tasklet_to_last_id = TaskletToLastId() + + +#======================================================================================================================= +# _TaskletInfo +#======================================================================================================================= +class _TaskletInfo: + + _last_id = 0 + + def __init__(self, tasklet_weakref, tasklet): + self.frame_id = None + self.tasklet_weakref = tasklet_weakref + + last_id = _tasklet_to_last_id.get(tasklet) + if last_id is None: + _TaskletInfo._last_id += 1 + last_id = _TaskletInfo._last_id + _tasklet_to_last_id[tasklet] = last_id + + self._tasklet_id = last_id + + self.update_name() + + def update_name(self): + tasklet = self.tasklet_weakref() + if tasklet: + if tasklet.blocked: + state = 'blocked' + elif tasklet.paused: + state = 'paused' + elif tasklet.scheduled: + state = 'scheduled' + else: + state = '' + + try: + name = tasklet.name + except AttributeError: + if tasklet.is_main: + name = 'MainTasklet' + else: + name = 'Tasklet-%s' % (self._tasklet_id,) + + thread_id = tasklet.thread_id + if thread_id != -1: + for thread in threading.enumerate(): + if thread.ident == thread_id: + if thread.name: + thread_name = "of %s" % (thread.name,) + else: + thread_name = "of Thread-%s" % (thread.name or str(thread_id),) + break + else: + # should not happen. + thread_name = "of Thread-%s" % (str(thread_id),) + thread = None + else: + # tasklet is no longer bound to a thread, because its thread ended + thread_name = "without thread" + + tid = id(tasklet) + tasklet = None + else: + state = 'dead' + name = 'Tasklet-%s' % (self._tasklet_id,) + thread_name = "" + tid = '-' + self.tasklet_name = '%s %s %s (%s)' % (state, name, thread_name, tid) + + if not hasattr(stackless.tasklet, "trace_function"): + + # bug https://bitbucket.org/stackless-dev/stackless/issue/42 + # is not fixed. Stackless releases before 2014 + def update_name(self): + tasklet = self.tasklet_weakref() + if tasklet: + try: + name = tasklet.name + except AttributeError: + if tasklet.is_main: + name = 'MainTasklet' + else: + name = 'Tasklet-%s' % (self._tasklet_id,) + + thread_id = tasklet.thread_id + for thread in threading.enumerate(): + if thread.ident == thread_id: + if thread.name: + thread_name = "of %s" % (thread.name,) + else: + thread_name = "of Thread-%s" % (thread.name or str(thread_id),) + break + else: + # should not happen. 
+ thread_name = "of Thread-%s" % (str(thread_id),) + thread = None + + tid = id(tasklet) + tasklet = None + else: + name = 'Tasklet-%s' % (self._tasklet_id,) + thread_name = "" + tid = '-' + self.tasklet_name = '%s %s (%s)' % (name, thread_name, tid) + + +_weak_tasklet_registered_to_info = {} + + +#======================================================================================================================= +# get_tasklet_info +#======================================================================================================================= +def get_tasklet_info(tasklet): + return register_tasklet_info(tasklet) + + +#======================================================================================================================= +# register_tasklet_info +#======================================================================================================================= +def register_tasklet_info(tasklet): + r = weakref.ref(tasklet) + info = _weak_tasklet_registered_to_info.get(r) + if info is None: + info = _weak_tasklet_registered_to_info[r] = _TaskletInfo(r, tasklet) + + return info + + +_application_set_schedule_callback = None + + +#======================================================================================================================= +# _schedule_callback +#======================================================================================================================= +def _schedule_callback(prev, next): + ''' + Called when a context is stopped or a new context is made runnable. + ''' + try: + if not prev and not next: + return + + current_frame = sys._getframe() + + if next: + register_tasklet_info(next) + + # Ok, making next runnable: set the tracing facility in it. + debugger = get_global_debugger() + if debugger is not None: + next.trace_function = debugger.get_thread_local_trace_func() + frame = next.frame + if frame is current_frame: + frame = frame.f_back + if hasattr(frame, 'f_trace'): # Note: can be None (but hasattr should cover for that too). + frame.f_trace = debugger.get_thread_local_trace_func() + + debugger = None + + if prev: + register_tasklet_info(prev) + + try: + for tasklet_ref, tasklet_info in list(_weak_tasklet_registered_to_info.items()): # Make sure it's a copy! + tasklet = tasklet_ref() + if tasklet is None or not tasklet.alive: + # Garbage-collected already! 
+ try: + del _weak_tasklet_registered_to_info[tasklet_ref] + except KeyError: + pass + if tasklet_info.frame_id is not None: + remove_custom_frame(tasklet_info.frame_id) + else: + is_running = stackless.get_thread_info(tasklet.thread_id)[1] is tasklet + if tasklet is prev or (tasklet is not next and not is_running): + # the tasklet won't run after this scheduler action: + # - the tasklet is the previous tasklet + # - it is not the next tasklet and it is not an already running tasklet + frame = tasklet.frame + if frame is current_frame: + frame = frame.f_back + if frame is not None: + # print >>sys.stderr, "SchedCB: %r, %d, '%s', '%s'" % (tasklet, frame.f_lineno, _filename, base) + debugger = get_global_debugger() + if debugger is not None and debugger.get_file_type(frame) is None: + tasklet_info.update_name() + if tasklet_info.frame_id is None: + tasklet_info.frame_id = add_custom_frame(frame, tasklet_info.tasklet_name, tasklet.thread_id) + else: + update_custom_frame(tasklet_info.frame_id, frame, tasklet.thread_id, name=tasklet_info.tasklet_name) + debugger = None + + elif tasklet is next or is_running: + if tasklet_info.frame_id is not None: + # Remove info about stackless suspended when it starts to run. + remove_custom_frame(tasklet_info.frame_id) + tasklet_info.frame_id = None + + finally: + tasklet = None + tasklet_info = None + frame = None + + except: + pydev_log.exception() + + if _application_set_schedule_callback is not None: + return _application_set_schedule_callback(prev, next) + + +if not hasattr(stackless.tasklet, "trace_function"): + + # Older versions of Stackless, released before 2014 + # This code does not work reliable! It is affected by several + # stackless bugs: Stackless issues #44, #42, #40 + def _schedule_callback(prev, next): + ''' + Called when a context is stopped or a new context is made runnable. + ''' + try: + if not prev and not next: + return + + if next: + register_tasklet_info(next) + + # Ok, making next runnable: set the tracing facility in it. + debugger = get_global_debugger() + if debugger is not None and next.frame: + if hasattr(next.frame, 'f_trace'): + next.frame.f_trace = debugger.get_thread_local_trace_func() + debugger = None + + if prev: + register_tasklet_info(prev) + + try: + for tasklet_ref, tasklet_info in list(_weak_tasklet_registered_to_info.items()): # Make sure it's a copy! + tasklet = tasklet_ref() + if tasklet is None or not tasklet.alive: + # Garbage-collected already! + try: + del _weak_tasklet_registered_to_info[tasklet_ref] + except KeyError: + pass + if tasklet_info.frame_id is not None: + remove_custom_frame(tasklet_info.frame_id) + else: + if tasklet.paused or tasklet.blocked or tasklet.scheduled: + if tasklet.frame and tasklet.frame.f_back: + f_back = tasklet.frame.f_back + debugger = get_global_debugger() + if debugger is not None and debugger.get_file_type(f_back) is None: + if tasklet_info.frame_id is None: + tasklet_info.frame_id = add_custom_frame(f_back, tasklet_info.tasklet_name, tasklet.thread_id) + else: + update_custom_frame(tasklet_info.frame_id, f_back, tasklet.thread_id) + debugger = None + + elif tasklet.is_current: + if tasklet_info.frame_id is not None: + # Remove info about stackless suspended when it starts to run. 
+ remove_custom_frame(tasklet_info.frame_id) + tasklet_info.frame_id = None + + finally: + tasklet = None + tasklet_info = None + f_back = None + + except: + pydev_log.exception() + + if _application_set_schedule_callback is not None: + return _application_set_schedule_callback(prev, next) + + _original_setup = stackless.tasklet.setup + + #======================================================================================================================= + # setup + #======================================================================================================================= + def setup(self, *args, **kwargs): + ''' + Called to run a new tasklet: rebind the creation so that we can trace it. + ''' + + f = self.tempval + + def new_f(old_f, args, kwargs): + + debugger = get_global_debugger() + if debugger is not None: + debugger.enable_tracing() + + debugger = None + + # Remove our own traces :) + self.tempval = old_f + register_tasklet_info(self) + + # Hover old_f to see the stackless being created and *args and **kwargs to see its parameters. + return old_f(*args, **kwargs) + + # This is the way to tell stackless that the function it should execute is our function, not the original one. Note: + # setting tempval is the same as calling bind(new_f), but it seems that there's no other way to get the currently + # bound function, so, keeping on using tempval instead of calling bind (which is actually the same thing in a better + # API). + + self.tempval = new_f + + return _original_setup(self, f, args, kwargs) + + #======================================================================================================================= + # __call__ + #======================================================================================================================= + def __call__(self, *args, **kwargs): + ''' + Called to run a new tasklet: rebind the creation so that we can trace it. + ''' + + return setup(self, *args, **kwargs) + + _original_run = stackless.run + + #======================================================================================================================= + # run + #======================================================================================================================= + def run(*args, **kwargs): + debugger = get_global_debugger() + if debugger is not None: + debugger.enable_tracing() + debugger = None + + return _original_run(*args, **kwargs) + + +#======================================================================================================================= +# patch_stackless +#======================================================================================================================= +def patch_stackless(): + ''' + This function should be called to patch the stackless module so that new tasklets are properly tracked in the + debugger. 
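# --- illustrative aside, not part of the vendored pydevd sources ---
# Sketch of the callback-chaining pattern that patch_stackless (below) uses:
# the debugger keeps its own scheduler callback installed and forwards to
# whatever callback the application registers afterwards, so both keep working.
# No stackless dependency here; the names are hypothetical.
_app_callback_sketch = None

def _debugger_callback_sketch(prev, next):
    # ... debugger bookkeeping for prev/next would happen here ...
    if _app_callback_sketch is not None:
        return _app_callback_sketch(prev, next)

def set_schedule_callback_sketch(callback):
    # Replacement setter: the debugger callback stays installed and the
    # application callback is chained behind it; the old one is returned.
    global _app_callback_sketch
    old = _app_callback_sketch
    _app_callback_sketch = callback
    return old
# --- end of aside ---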
+ ''' + global _application_set_schedule_callback + _application_set_schedule_callback = stackless.set_schedule_callback(_schedule_callback) + + def set_schedule_callback(callable): + global _application_set_schedule_callback + old = _application_set_schedule_callback + _application_set_schedule_callback = callable + return old + + def get_schedule_callback(): + global _application_set_schedule_callback + return _application_set_schedule_callback + + set_schedule_callback.__doc__ = stackless.set_schedule_callback.__doc__ + if hasattr(stackless, "get_schedule_callback"): + get_schedule_callback.__doc__ = stackless.get_schedule_callback.__doc__ + stackless.set_schedule_callback = set_schedule_callback + stackless.get_schedule_callback = get_schedule_callback + + if not hasattr(stackless.tasklet, "trace_function"): + # Older versions of Stackless, released before 2014 + __call__.__doc__ = stackless.tasklet.__call__.__doc__ + stackless.tasklet.__call__ = __call__ + + setup.__doc__ = stackless.tasklet.setup.__doc__ + stackless.tasklet.setup = setup + + run.__doc__ = stackless.run.__doc__ + stackless.run = run + + +patch_stackless = call_only_once(patch_stackless) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_suspended_frames.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_suspended_frames.py new file mode 100644 index 0000000..955ba9a --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_suspended_frames.py @@ -0,0 +1,533 @@ +from contextlib import contextmanager +import sys + +from _pydevd_bundle.pydevd_constants import get_frame, RETURN_VALUES_DICT, \ + ForkSafeLock, GENERATED_LEN_ATTR_NAME, silence_warnings_decorator +from _pydevd_bundle.pydevd_xml import get_variable_details, get_type +from _pydev_bundle.pydev_override import overrides +from _pydevd_bundle.pydevd_resolver import sorted_attributes_key, TOO_LARGE_ATTR, get_var_scope +from _pydevd_bundle.pydevd_safe_repr import SafeRepr +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_vars +from _pydev_bundle.pydev_imports import Exec +from _pydevd_bundle.pydevd_frame_utils import FramesList +from _pydevd_bundle.pydevd_utils import ScopeRequest, DAPGrouper, Timer + + +class _AbstractVariable(object): + + # Default attributes in class, set in instance. 
+ + name = None + value = None + evaluate_name = None + + def __init__(self, py_db): + assert py_db is not None + self.py_db = py_db + + def get_name(self): + return self.name + + def get_value(self): + return self.value + + def get_variable_reference(self): + return id(self.value) + + def get_var_data(self, fmt=None, **safe_repr_custom_attrs): + ''' + :param dict fmt: + Format expected by the DAP (keys: 'hex': bool, 'rawString': bool) + ''' + timer = Timer() + safe_repr = SafeRepr() + if fmt is not None: + safe_repr.convert_to_hex = fmt.get('hex', False) + safe_repr.raw_value = fmt.get('rawString', False) + for key, val in safe_repr_custom_attrs.items(): + setattr(safe_repr, key, val) + + type_name, _type_qualifier, _is_exception_on_eval, resolver, value = get_variable_details( + self.value, to_string=safe_repr) + + is_raw_string = type_name in ('str', 'bytes', 'bytearray') + + attributes = [] + + if is_raw_string: + attributes.append('rawString') + + name = self.name + + if self._is_return_value: + attributes.append('readOnly') + name = '(return) %s' % (name,) + + elif name in (TOO_LARGE_ATTR, GENERATED_LEN_ATTR_NAME): + attributes.append('readOnly') + + try: + if self.value.__class__ == DAPGrouper: + type_name = '' + except: + pass # Ignore errors accessing __class__. + + var_data = { + 'name': name, + 'value': value, + 'type': type_name, + } + + if self.evaluate_name is not None: + var_data['evaluateName'] = self.evaluate_name + + if resolver is not None: # I.e.: it's a container + var_data['variablesReference'] = self.get_variable_reference() + else: + var_data['variablesReference'] = 0 # It's mandatory (although if == 0 it doesn't have children). + + if len(attributes) > 0: + var_data['presentationHint'] = {'attributes': attributes} + + timer.report_if_compute_repr_attr_slow('', name, type_name) + return var_data + + def get_children_variables(self, fmt=None, scope=None): + raise NotImplementedError() + + def get_child_variable_named(self, name, fmt=None, scope=None): + for child_var in self.get_children_variables(fmt=fmt, scope=scope): + if child_var.get_name() == name: + return child_var + return None + + def _group_entries(self, lst, handle_return_values): + scope_to_grouper = {} + + group_entries = [] + if isinstance(self.value, DAPGrouper): + new_lst = lst + else: + new_lst = [] + get_presentation = self.py_db.variable_presentation.get_presentation + # Now that we have the contents, group items. 
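# --- illustrative aside, not part of the vendored pydevd sources ---
# Sketch of the grouping step that follows: each (name, value) entry gets a
# scope, and entries with a scope are collected under a group node instead of
# being shown inline. The classification below (dunder names -> a "special
# variables" group) is a simplification; names are hypothetical.
def _group_by_scope_sketch(entries):
    inline = []
    groups = {}
    for name, value in entries:
        scope = 'special variables' if name.startswith('__') and name.endswith('__') else None
        if scope is None:
            inline.append((name, value))
        else:
            groups.setdefault(scope, []).append((name, value))
    return inline, groups

# Example:
_inline, _groups = _group_by_scope_sketch([('x', 1), ('__doc__', None)])
assert _inline == [('x', 1)]
assert _groups == {'special variables': [('__doc__', None)]}
# --- end of aside ---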
+ for attr_name, attr_value, evaluate_name in lst: + scope = get_var_scope(attr_name, attr_value, evaluate_name, handle_return_values) + + entry = (attr_name, attr_value, evaluate_name) + if scope: + presentation = get_presentation(scope) + if presentation == 'hide': + continue + + elif presentation == 'inline': + new_lst.append(entry) + + else: # group + if scope not in scope_to_grouper: + grouper = DAPGrouper(scope) + scope_to_grouper[scope] = grouper + else: + grouper = scope_to_grouper[scope] + + grouper.contents_debug_adapter_protocol.append(entry) + + else: + new_lst.append(entry) + + for scope in DAPGrouper.SCOPES_SORTED: + grouper = scope_to_grouper.get(scope) + if grouper is not None: + group_entries.append((scope, grouper, None)) + + return new_lst, group_entries + + +class _ObjectVariable(_AbstractVariable): + + def __init__(self, py_db, name, value, register_variable, is_return_value=False, evaluate_name=None, frame=None): + _AbstractVariable.__init__(self, py_db) + self.frame = frame + self.name = name + self.value = value + self._register_variable = register_variable + self._register_variable(self) + self._is_return_value = is_return_value + self.evaluate_name = evaluate_name + + @silence_warnings_decorator + @overrides(_AbstractVariable.get_children_variables) + def get_children_variables(self, fmt=None, scope=None): + _type, _type_name, resolver = get_type(self.value) + + children_variables = [] + if resolver is not None: # i.e.: it's a container. + if hasattr(resolver, 'get_contents_debug_adapter_protocol'): + # The get_contents_debug_adapter_protocol needs to return sorted. + lst = resolver.get_contents_debug_adapter_protocol(self.value, fmt=fmt) + else: + # If there's no special implementation, the default is sorting the keys. + dct = resolver.get_dictionary(self.value) + lst = sorted(dct.items(), key=lambda tup: sorted_attributes_key(tup[0])) + # No evaluate name in this case. + lst = [(key, value, None) for (key, value) in lst] + + lst, group_entries = self._group_entries(lst, handle_return_values=False) + if group_entries: + lst = group_entries + lst + parent_evaluate_name = self.evaluate_name + if parent_evaluate_name: + for key, val, evaluate_name in lst: + if evaluate_name is not None: + if callable(evaluate_name): + evaluate_name = evaluate_name(parent_evaluate_name) + else: + evaluate_name = parent_evaluate_name + evaluate_name + variable = _ObjectVariable( + self.py_db, key, val, self._register_variable, evaluate_name=evaluate_name, frame=self.frame) + children_variables.append(variable) + else: + for key, val, evaluate_name in lst: + # No evaluate name + variable = _ObjectVariable(self.py_db, key, val, self._register_variable, frame=self.frame) + children_variables.append(variable) + + return children_variables + + def change_variable(self, name, value, py_db, fmt=None): + + children_variable = self.get_child_variable_named(name) + if children_variable is None: + return None + + var_data = children_variable.get_var_data() + evaluate_name = var_data.get('evaluateName') + + if not evaluate_name: + # Note: right now we only pass control to the resolver in the cases where + # there's no evaluate name (the idea being that if we can evaluate it, + # we can use that evaluation to set the value too -- if in the future + # a case where this isn't true is found this logic may need to be changed). 
+ _type, _type_name, container_resolver = get_type(self.value) + if hasattr(container_resolver, 'change_var_from_name'): + try: + new_value = eval(value) + except: + return None + new_key = container_resolver.change_var_from_name(self.value, name, new_value) + if new_key is not None: + return _ObjectVariable( + self.py_db, new_key, new_value, self._register_variable, evaluate_name=None, frame=self.frame) + + return None + else: + return None + + frame = self.frame + if frame is None: + return None + + try: + # This handles the simple cases (such as dict, list, object) + Exec('%s=%s' % (evaluate_name, value), frame.f_globals, frame.f_locals) + except: + return None + + return self.get_child_variable_named(name, fmt=fmt) + + +def sorted_variables_key(obj): + return sorted_attributes_key(obj.name) + + +class _FrameVariable(_AbstractVariable): + + def __init__(self, py_db, frame, register_variable): + _AbstractVariable.__init__(self, py_db) + self.frame = frame + + self.name = self.frame.f_code.co_name + self.value = frame + + self._register_variable = register_variable + self._register_variable(self) + + def change_variable(self, name, value, py_db, fmt=None): + frame = self.frame + + pydevd_vars.change_attr_expression(frame, name, value, py_db) + + return self.get_child_variable_named(name, fmt=fmt) + + @silence_warnings_decorator + @overrides(_AbstractVariable.get_children_variables) + def get_children_variables(self, fmt=None, scope=None): + children_variables = [] + if scope is not None: + assert isinstance(scope, ScopeRequest) + scope = scope.scope + + if scope in ('locals', None): + dct = self.frame.f_locals + elif scope == 'globals': + dct = self.frame.f_globals + else: + raise AssertionError('Unexpected scope: %s' % (scope,)) + + lst, group_entries = self._group_entries([(x[0], x[1], None) for x in list(dct.items()) if x[0] != '_pydev_stop_at_break'], handle_return_values=True) + group_variables = [] + + for key, val, _ in group_entries: + # Make sure that the contents in the group are also sorted. + val.contents_debug_adapter_protocol.sort(key=lambda v:sorted_attributes_key(v[0])) + variable = _ObjectVariable(self.py_db, key, val, self._register_variable, False, key, frame=self.frame) + group_variables.append(variable) + + for key, val, _ in lst: + is_return_value = key == RETURN_VALUES_DICT + if is_return_value: + for return_key, return_value in val.items(): + variable = _ObjectVariable( + self.py_db, return_key, return_value, self._register_variable, is_return_value, '%s[%r]' % (key, return_key), frame=self.frame) + children_variables.append(variable) + else: + variable = _ObjectVariable(self.py_db, key, val, self._register_variable, is_return_value, key, frame=self.frame) + children_variables.append(variable) + + # Frame variables always sorted. + children_variables.sort(key=sorted_variables_key) + if group_variables: + # Groups have priority over other variables. + children_variables = group_variables + children_variables + + return children_variables + + +class _FramesTracker(object): + ''' + This is a helper class to be used to track frames when a thread becomes suspended. 
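# --- illustrative aside, not part of the vendored pydevd sources ---
# Sketch of the reference scheme the frames tracker below relies on: the
# "variable reference" handed to the client is id(value), and a dict maps it
# back to the wrapper object so later requests (children, set value) can be
# resolved while the thread stays suspended. Names are hypothetical.
class _ReferenceRegistrySketch(object):
    def __init__(self):
        self._reference_to_obj = {}

    def register(self, obj):
        reference = id(obj)
        self._reference_to_obj[reference] = obj
        return reference

    def resolve(self, reference):
        # Raises KeyError for unknown references (e.g. after the registry was
        # cleared when the thread resumed).
        return self._reference_to_obj[reference]

# Example:
_registry = _ReferenceRegistrySketch()
_value = ['some', 'value']
_ref = _registry.register(_value)
assert _registry.resolve(_ref) is _value
# --- end of aside ---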
+ ''' + + def __init__(self, suspended_frames_manager, py_db): + self._suspended_frames_manager = suspended_frames_manager + self.py_db = py_db + self._frame_id_to_frame = {} + + # Note that a given frame may appear in multiple threads when we have custom + # frames added, but as those are coroutines, this map will point to the actual + # main thread (which is the one that needs to be suspended for us to get the + # variables). + self._frame_id_to_main_thread_id = {} + + # A map of the suspended thread id -> list(frames ids) -- note that + # frame ids are kept in order (the first one is the suspended frame). + self._thread_id_to_frame_ids = {} + + self._thread_id_to_frames_list = {} + + # The main suspended thread (if this is a coroutine this isn't the id of the + # coroutine thread, it's the id of the actual suspended thread). + self._main_thread_id = None + + # Helper to know if it was already untracked. + self._untracked = False + + # We need to be thread-safe! + self._lock = ForkSafeLock() + + self._variable_reference_to_variable = {} + + def _register_variable(self, variable): + variable_reference = variable.get_variable_reference() + self._variable_reference_to_variable[variable_reference] = variable + + def obtain_as_variable(self, name, value, evaluate_name=None, frame=None): + if evaluate_name is None: + evaluate_name = name + + variable_reference = id(value) + variable = self._variable_reference_to_variable.get(variable_reference) + if variable is not None: + return variable + + # Still not created, let's do it now. + return _ObjectVariable( + self.py_db, name, value, self._register_variable, is_return_value=False, evaluate_name=evaluate_name, frame=frame) + + def get_main_thread_id(self): + return self._main_thread_id + + def get_variable(self, variable_reference): + return self._variable_reference_to_variable[variable_reference] + + def track(self, thread_id, frames_list, frame_custom_thread_id=None): + ''' + :param thread_id: + The thread id to be used for this frame. + + :param FramesList frames_list: + A list of frames to be tracked (the first is the topmost frame which is suspended at the given thread). + + :param frame_custom_thread_id: + If None this this is the id of the thread id for the custom frame (i.e.: coroutine). + ''' + assert frames_list.__class__ == FramesList + with self._lock: + coroutine_or_main_thread_id = frame_custom_thread_id or thread_id + + if coroutine_or_main_thread_id in self._suspended_frames_manager._thread_id_to_tracker: + sys.stderr.write('pydevd: Something is wrong. Tracker being added twice to the same thread id.\n') + + self._suspended_frames_manager._thread_id_to_tracker[coroutine_or_main_thread_id] = self + self._main_thread_id = thread_id + + frame_ids_from_thread = self._thread_id_to_frame_ids.setdefault( + coroutine_or_main_thread_id, []) + + self._thread_id_to_frames_list[coroutine_or_main_thread_id] = frames_list + for frame in frames_list: + frame_id = id(frame) + self._frame_id_to_frame[frame_id] = frame + _FrameVariable(self.py_db, frame, self._register_variable) # Instancing is enough to register. + self._suspended_frames_manager._variable_reference_to_frames_tracker[frame_id] = self + frame_ids_from_thread.append(frame_id) + + self._frame_id_to_main_thread_id[frame_id] = thread_id + + frame = None + + def untrack_all(self): + with self._lock: + if self._untracked: + # Calling multiple times is expected for the set next statement. 
+ return + self._untracked = True + for thread_id in self._thread_id_to_frame_ids: + self._suspended_frames_manager._thread_id_to_tracker.pop(thread_id, None) + + for frame_id in self._frame_id_to_frame: + del self._suspended_frames_manager._variable_reference_to_frames_tracker[frame_id] + + self._frame_id_to_frame.clear() + self._frame_id_to_main_thread_id.clear() + self._thread_id_to_frame_ids.clear() + self._thread_id_to_frames_list.clear() + self._main_thread_id = None + self._suspended_frames_manager = None + self._variable_reference_to_variable.clear() + + def get_frames_list(self, thread_id): + with self._lock: + return self._thread_id_to_frames_list.get(thread_id) + + def find_frame(self, thread_id, frame_id): + with self._lock: + return self._frame_id_to_frame.get(frame_id) + + def create_thread_suspend_command(self, thread_id, stop_reason, message, suspend_type): + with self._lock: + # First one is topmost frame suspended. + frames_list = self._thread_id_to_frames_list[thread_id] + + cmd = self.py_db.cmd_factory.make_thread_suspend_message( + self.py_db, thread_id, frames_list, stop_reason, message, suspend_type) + + frames_list = None + return cmd + + +class SuspendedFramesManager(object): + + def __init__(self): + self._thread_id_to_fake_frames = {} + self._thread_id_to_tracker = {} + + # Mappings + self._variable_reference_to_frames_tracker = {} + + def _get_tracker_for_variable_reference(self, variable_reference): + tracker = self._variable_reference_to_frames_tracker.get(variable_reference) + if tracker is not None: + return tracker + + for _thread_id, tracker in self._thread_id_to_tracker.items(): + try: + tracker.get_variable(variable_reference) + except KeyError: + pass + else: + return tracker + + return None + + def get_thread_id_for_variable_reference(self, variable_reference): + ''' + We can't evaluate variable references values on any thread, only in the suspended + thread (the main reason for this is that in UI frameworks inspecting a UI object + from a different thread can potentially crash the application). + + :param int variable_reference: + The variable reference (can be either a frame id or a reference to a previously + gotten variable). + + :return str: + The thread id for the thread to be used to inspect the given variable reference or + None if the thread was already resumed. 
+ ''' + frames_tracker = self._get_tracker_for_variable_reference(variable_reference) + if frames_tracker is not None: + return frames_tracker.get_main_thread_id() + return None + + def get_frame_tracker(self, thread_id): + return self._thread_id_to_tracker.get(thread_id) + + def get_variable(self, variable_reference): + ''' + :raises KeyError + ''' + frames_tracker = self._get_tracker_for_variable_reference(variable_reference) + if frames_tracker is None: + raise KeyError() + return frames_tracker.get_variable(variable_reference) + + def get_frames_list(self, thread_id): + tracker = self._thread_id_to_tracker.get(thread_id) + if tracker is None: + return None + return tracker.get_frames_list(thread_id) + + @contextmanager + def track_frames(self, py_db): + tracker = _FramesTracker(self, py_db) + try: + yield tracker + finally: + tracker.untrack_all() + + def add_fake_frame(self, thread_id, frame_id, frame): + self._thread_id_to_fake_frames.setdefault(thread_id, {})[int(frame_id)] = frame + + def find_frame(self, thread_id, frame_id): + try: + if frame_id == "*": + return get_frame() # any frame is specified with "*" + frame_id = int(frame_id) + + fake_frames = self._thread_id_to_fake_frames.get(thread_id) + if fake_frames is not None: + frame = fake_frames.get(frame_id) + if frame is not None: + return frame + + frames_tracker = self._thread_id_to_tracker.get(thread_id) + if frames_tracker is not None: + frame = frames_tracker.find_frame(thread_id, frame_id) + if frame is not None: + return frame + + return None + except: + pydev_log.exception() + return None diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_thread_lifecycle.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_thread_lifecycle.py new file mode 100644 index 0000000..069b6b6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_thread_lifecycle.py @@ -0,0 +1,96 @@ +from _pydevd_bundle import pydevd_utils +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info +from _pydevd_bundle.pydevd_comm_constants import CMD_STEP_INTO, CMD_THREAD_SUSPEND +from _pydevd_bundle.pydevd_constants import PYTHON_SUSPEND, STATE_SUSPEND, get_thread_id, STATE_RUN +from _pydev_bundle._pydev_saved_modules import threading +from _pydev_bundle import pydev_log + + +def pydevd_find_thread_by_id(thread_id): + try: + threads = threading.enumerate() + for i in threads: + tid = get_thread_id(i) + if thread_id == tid or thread_id.endswith('|' + tid): + return i + + # This can happen when a request comes for a thread which was previously removed. + pydev_log.info("Could not find thread %s.", thread_id) + pydev_log.info("Available: %s.", ([get_thread_id(t) for t in threads],)) + except: + pydev_log.exception() + + return None + + +def mark_thread_suspended(thread, stop_reason, original_step_cmd=-1): + info = set_additional_thread_info(thread) + info.suspend_type = PYTHON_SUSPEND + if original_step_cmd != -1: + stop_reason = original_step_cmd + thread.stop_reason = stop_reason + + # Note: don't set the 'pydev_original_step_cmd' here if unset. + + if info.pydev_step_cmd == -1: + # If the step command is not specified, set it to step into + # to make sure it'll break as soon as possible. + info.pydev_step_cmd = CMD_STEP_INTO + info.pydev_step_stop = None + + # Mark as suspended as the last thing. 
+ info.pydev_state = STATE_SUSPEND + + return info + + +def internal_run_thread(thread, set_additional_thread_info): + info = set_additional_thread_info(thread) + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_step_stop = None + info.pydev_state = STATE_RUN + + +def resume_threads(thread_id, except_thread=None): + pydev_log.info('Resuming threads: %s (except thread: %s)', thread_id, except_thread) + threads = [] + if thread_id == '*': + threads = pydevd_utils.get_non_pydevd_threads() + + elif thread_id.startswith('__frame__:'): + pydev_log.critical("Can't make tasklet run: %s", thread_id) + + else: + threads = [pydevd_find_thread_by_id(thread_id)] + + for t in threads: + if t is None or t is except_thread: + pydev_log.info('Skipped resuming thread: %s', t) + continue + + internal_run_thread(t, set_additional_thread_info=set_additional_thread_info) + + +def suspend_all_threads(py_db, except_thread): + ''' + Suspend all except the one passed as a parameter. + :param except_thread: + ''' + pydev_log.info('Suspending all threads except: %s', except_thread) + all_threads = pydevd_utils.get_non_pydevd_threads() + for t in all_threads: + if getattr(t, 'pydev_do_not_trace', None): + pass # skip some other threads, i.e. ipython history saving thread from debug console + else: + if t is except_thread: + continue + info = mark_thread_suspended(t, CMD_THREAD_SUSPEND) + frame = info.get_topmost_frame(t) + + # Reset the tracing as in this case as it could've set scopes to be untraced. + if frame is not None: + try: + py_db.set_trace_for_frame_and_parents(frame) + finally: + frame = None diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_timeout.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_timeout.py new file mode 100644 index 0000000..4a18caf --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_timeout.py @@ -0,0 +1,239 @@ +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_daemon_thread import PyDBDaemonThread +from _pydevd_bundle.pydevd_constants import thread_get_ident, IS_CPYTHON, NULL +import ctypes +import time +from _pydev_bundle import pydev_log +import weakref +from _pydevd_bundle.pydevd_utils import is_current_thread_main_thread +from _pydevd_bundle import pydevd_utils + +_DEBUG = False # Default should be False as this can be very verbose. + + +class _TimeoutThread(PyDBDaemonThread): + ''' + The idea in this class is that it should be usually stopped waiting + for the next event to be called (paused in a threading.Event.wait). + + When a new handle is added it sets the event so that it processes the handles and + then keeps on waiting as needed again. + + This is done so that it's a bit more optimized than creating many Timer threads. + ''' + + def __init__(self, py_db): + PyDBDaemonThread.__init__(self, py_db) + self._event = threading.Event() + self._handles = [] + + # We could probably do things valid without this lock so that it's possible to add + # handles while processing, but the implementation would also be harder to follow, + # so, for now, we're either processing or adding handles, not both at the same time. 
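# --- illustrative aside, not part of the vendored pydevd sources ---
# Sketch of the scheduling idea implemented by process_handles() below: store
# absolute deadlines, fire everything already due, and wait until the earliest
# remaining deadline (or indefinitely when nothing is pending). Names are
# hypothetical.
import time

def _next_wait_sketch(deadlines, now=None):
    now = time.time() if now is None else now
    pending = [d for d in deadlines if d > now]   # deadlines still in the future
    if not pending:
        return None                               # nothing pending: wait for a new handle
    return min(pending) - now                     # sleep until the earliest deadline

# Example: with deadlines 5s and 12s away, the next wake-up is in ~5s.
_now = time.time()
_wait = _next_wait_sketch([_now + 5, _now + 12], now=_now)
assert abs(_wait - 5) < 0.001
# --- end of aside ---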
+ self._lock = threading.Lock() + + def _on_run(self): + wait_time = None + while not self._kill_received: + if _DEBUG: + if wait_time is None: + pydev_log.critical('pydevd_timeout: Wait until a new handle is added.') + else: + pydev_log.critical('pydevd_timeout: Next wait time: %s.', wait_time) + self._event.wait(wait_time) + + if self._kill_received: + self._handles = [] + return + + wait_time = self.process_handles() + + def process_handles(self): + ''' + :return int: + Returns the time we should be waiting for to process the next event properly. + ''' + with self._lock: + if _DEBUG: + pydev_log.critical('pydevd_timeout: Processing handles') + self._event.clear() + handles = self._handles + new_handles = self._handles = [] + + # Do all the processing based on this time (we want to consider snapshots + # of processing time -- anything not processed now may be processed at the + # next snapshot). + curtime = time.time() + + min_handle_timeout = None + + for handle in handles: + if curtime < handle.abs_timeout and not handle.disposed: + # It still didn't time out. + if _DEBUG: + pydev_log.critical('pydevd_timeout: Handle NOT processed: %s', handle) + new_handles.append(handle) + if min_handle_timeout is None: + min_handle_timeout = handle.abs_timeout + + elif handle.abs_timeout < min_handle_timeout: + min_handle_timeout = handle.abs_timeout + + else: + if _DEBUG: + pydev_log.critical('pydevd_timeout: Handle processed: %s', handle) + # Timed out (or disposed), so, let's execute it (should be no-op if disposed). + handle.exec_on_timeout() + + if min_handle_timeout is None: + return None + else: + timeout = min_handle_timeout - curtime + if timeout <= 0: + pydev_log.critical('pydevd_timeout: Expected timeout to be > 0. Found: %s', timeout) + + return timeout + + def do_kill_pydev_thread(self): + PyDBDaemonThread.do_kill_pydev_thread(self) + with self._lock: + self._event.set() + + def add_on_timeout_handle(self, handle): + with self._lock: + self._handles.append(handle) + self._event.set() + + +class _OnTimeoutHandle(object): + + def __init__(self, tracker, abs_timeout, on_timeout, kwargs): + self._str = '_OnTimeoutHandle(%s)' % (on_timeout,) + + self._tracker = weakref.ref(tracker) + self.abs_timeout = abs_timeout + self.on_timeout = on_timeout + if kwargs is None: + kwargs = {} + self.kwargs = kwargs + self.disposed = False + + def exec_on_timeout(self): + # Note: lock should already be obtained when executing this function. + kwargs = self.kwargs + on_timeout = self.on_timeout + + if not self.disposed: + self.disposed = True + self.kwargs = None + self.on_timeout = None + + try: + if _DEBUG: + pydev_log.critical('pydevd_timeout: Calling on timeout: %s with kwargs: %s', on_timeout, kwargs) + + on_timeout(**kwargs) + except Exception: + pydev_log.exception('pydevd_timeout: Exception on callback timeout.') + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + tracker = self._tracker() + + if tracker is None: + lock = NULL + else: + lock = tracker._lock + + with lock: + self.disposed = True + self.kwargs = None + self.on_timeout = None + + def __str__(self): + return self._str + + __repr__ = __str__ + + +class TimeoutTracker(object): + ''' + This is a helper class to track the timeout of something. 
+ ''' + + def __init__(self, py_db): + self._thread = None + self._lock = threading.Lock() + self._py_db = weakref.ref(py_db) + + def call_on_timeout(self, timeout, on_timeout, kwargs=None): + ''' + This can be called regularly to always execute the given function after a given timeout: + + call_on_timeout(py_db, 10, on_timeout) + + + Or as a context manager to stop the method from being called if it finishes before the timeout + elapses: + + with call_on_timeout(py_db, 10, on_timeout): + ... + + Note: the callback will be called from a PyDBDaemonThread. + ''' + with self._lock: + if self._thread is None: + if _DEBUG: + pydev_log.critical('pydevd_timeout: Created _TimeoutThread.') + + self._thread = _TimeoutThread(self._py_db()) + self._thread.start() + + curtime = time.time() + handle = _OnTimeoutHandle(self, curtime + timeout, on_timeout, kwargs) + if _DEBUG: + pydev_log.critical('pydevd_timeout: Added handle: %s.', handle) + self._thread.add_on_timeout_handle(handle) + return handle + + +def create_interrupt_this_thread_callback(): + ''' + The idea here is returning a callback that when called will generate a KeyboardInterrupt + in the thread that called this function. + + If this is the main thread, this means that it'll emulate a Ctrl+C (which may stop I/O + and sleep operations). + + For other threads, this will call PyThreadState_SetAsyncExc to raise + a KeyboardInterrupt before the next instruction (so, it won't really interrupt I/O or + sleep operations). + + :return callable: + Returns a callback that will interrupt the current thread (this may be called + from an auxiliary thread). + ''' + tid = thread_get_ident() + + if is_current_thread_main_thread(): + main_thread = threading.current_thread() + + def raise_on_this_thread(): + pydev_log.debug('Callback to interrupt main thread.') + pydevd_utils.interrupt_main_thread(main_thread) + + else: + + # Note: this works in the sense that it can stop some cpu-intensive slow operation, + # but we can't really interrupt the thread out of some sleep or I/O operation + # (this will only be raised when Python is about to execute the next instruction). 
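# --- illustrative aside, not part of the vendored pydevd sources ---
# Standalone sketch of the CPython-only mechanism used just below:
# PyThreadState_SetAsyncExc schedules an exception in another thread, raised
# the next time that thread executes Python bytecode (so it cannot break out
# of a blocking C-level sleep or I/O call). Names are hypothetical.
import ctypes
import threading
import time

def _interrupt_thread_sketch(ident, exc_type=KeyboardInterrupt):
    ctypes.pythonapi.PyThreadState_SetAsyncExc(
        ctypes.c_long(ident), ctypes.py_object(exc_type))

def _worker_sketch():
    try:
        while True:        # busy loop, keeps executing Python bytecode
            pass
    except KeyboardInterrupt:
        print('worker interrupted')

# Example (CPython only):
_t = threading.Thread(target=_worker_sketch)
_t.start()
time.sleep(0.1)
_interrupt_thread_sketch(_t.ident)
_t.join()
# --- end of aside ---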
+ def raise_on_this_thread(): + if IS_CPYTHON: + pydev_log.debug('Interrupt thread: %s', tid) + ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), ctypes.py_object(KeyboardInterrupt)) + else: + pydev_log.debug('It is only possible to interrupt non-main threads in CPython.') + + return raise_on_this_thread diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_api.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_api.py new file mode 120000 index 0000000..f6b0658 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_api.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9e/57/f9/ea2f612b3e838a41f94f173f7f0671a8dd60495eb4429b343835b37388 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py new file mode 120000 index 0000000..7c8ebe2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6c/80/d1/8628680477dc554f635652b64ee4beb042281c461e8d391d1ec4976a03 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py new file mode 100644 index 0000000..88a3f08 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py @@ -0,0 +1,490 @@ +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_bundle.pydev_log import exception as pydev_log_exception +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import (get_current_thread_id, NO_FTRACE, + USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, ForkSafeLock) +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + +# IFDEF CYTHON +# from cpython.object cimport PyObject +# from cpython.ref cimport Py_INCREF, Py_XDECREF +# ELSE +from _pydevd_bundle.pydevd_frame import PyDBFrame, is_unhandled_exception +# ENDIF + +# IFDEF CYTHON +# cdef dict _global_notify_skipped_step_in +# cython_inline_constant: CMD_STEP_INTO = 107 +# cython_inline_constant: CMD_STEP_INTO_MY_CODE = 144 +# cython_inline_constant: CMD_STEP_RETURN = 109 +# cython_inline_constant: CMD_STEP_RETURN_MY_CODE = 160 +# ELSE +# Note: those are now inlined on cython. +CMD_STEP_INTO = 107 +CMD_STEP_INTO_MY_CODE = 144 +CMD_STEP_RETURN = 109 +CMD_STEP_RETURN_MY_CODE = 160 +# ENDIF + +# Cache where we should keep that we completely skipped entering some context. +# It needs to be invalidated when: +# - Breakpoints are changed +# It can be used when running regularly (without step over/step in/step return) +global_cache_skips = {} +global_cache_frame_skips = {} + +_global_notify_skipped_step_in = False +_global_notify_skipped_step_in_lock = ForkSafeLock() + + +def notify_skipped_step_in_because_of_filters(py_db, frame): + global _global_notify_skipped_step_in + + with _global_notify_skipped_step_in_lock: + if _global_notify_skipped_step_in: + # Check with lock in place (callers should actually have checked + # before without the lock in place due to performance). 
+ return + _global_notify_skipped_step_in = True + py_db.notify_skipped_step_in_because_of_filters(frame) + +# IFDEF CYTHON +# cdef class SafeCallWrapper: +# cdef method_object +# def __init__(self, method_object): +# self.method_object = method_object +# def __call__(self, *args): +# #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field +# #in the frame, and that reference might get destroyed by set trace on frame and parents +# cdef PyObject* method_obj = self.method_object +# Py_INCREF(method_obj) +# ret = (method_obj)(*args) +# Py_XDECREF (method_obj) +# return SafeCallWrapper(ret) if ret is not None else None +# def get_method_object(self): +# return self.method_object +# ELSE +# ENDIF + + +def fix_top_level_trace_and_get_trace_func(py_db, frame): + # IFDEF CYTHON + # cdef str filename; + # cdef str name; + # cdef tuple args; + # ENDIF + + # Note: this is always the first entry-point in the tracing for any thread. + # After entering here we'll set a new tracing function for this thread + # where more information is cached (and will also setup the tracing for + # frames where we should deal with unhandled exceptions). + thread = None + # Cache the frame which should be traced to deal with unhandled exceptions. + # (i.e.: thread entry-points). + + f_unhandled = frame + # print('called at', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + force_only_unhandled_tracer = False + while f_unhandled is not None: + # name = splitext(basename(f_unhandled.f_code.co_filename))[0] + + name = f_unhandled.f_code.co_filename + # basename + i = name.rfind('/') + j = name.rfind('\\') + if j > i: + i = j + if i >= 0: + name = name[i + 1:] + # remove ext + i = name.rfind('.') + if i >= 0: + name = name[:i] + + if name == 'threading': + if f_unhandled.f_code.co_name in ('__bootstrap', '_bootstrap'): + # We need __bootstrap_inner, not __bootstrap. + return None, False + + elif f_unhandled.f_code.co_name in ('__bootstrap_inner', '_bootstrap_inner'): + # Note: be careful not to use threading.currentThread to avoid creating a dummy thread. + t = f_unhandled.f_locals.get('self') + force_only_unhandled_tracer = True + if t is not None and isinstance(t, threading.Thread): + thread = t + break + + elif name == 'pydev_monkey': + if f_unhandled.f_code.co_name == '__call__': + force_only_unhandled_tracer = True + break + + elif name == 'pydevd': + if f_unhandled.f_code.co_name in ('run', 'main'): + # We need to get to _exec + return None, False + + if f_unhandled.f_code.co_name == '_exec': + force_only_unhandled_tracer = True + break + + elif name == 'pydevd_tracing': + return None, False + + elif f_unhandled.f_back is None: + break + + f_unhandled = f_unhandled.f_back + + if thread is None: + # Important: don't call threadingCurrentThread if we're in the threading module + # to avoid creating dummy threads. + if py_db.threading_get_ident is not None: + thread = py_db.threading_active.get(py_db.threading_get_ident()) + if thread is None: + return None, False + else: + # Jython does not have threading.get_ident(). 
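# --- illustrative aside, not part of the vendored pydevd sources ---
# Sketch of the frame walk performed above: starting from the current frame,
# follow f_back until a frame belonging to an "interesting" module is found
# (pydevd walks up to the thread bootstrap frame so unhandled exceptions can
# be reported there). Matching by module name is a simplification; the code
# above matches on the file's base name. Names are hypothetical.
import sys

def _find_frame_from_module_sketch(module_names):
    frame = sys._getframe()
    while frame is not None:
        if frame.f_globals.get('__name__') in module_names:
            return frame
        frame = frame.f_back
    return None

# Example: the nearest frame of this very module is the sketch's own frame.
assert _find_frame_from_module_sketch({__name__}) is not None
# --- end of aside ---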
+ thread = py_db.threading_current_thread() + + if getattr(thread, 'pydev_do_not_trace', None): + py_db.disable_tracing() + return None, False + + try: + additional_info = thread.additional_info + if additional_info is None: + raise AttributeError() + except: + additional_info = py_db.set_additional_thread_info(thread) + + # print('enter thread tracer', thread, get_current_thread_id(thread)) + args = (py_db, thread, additional_info, global_cache_skips, global_cache_frame_skips) + + if f_unhandled is not None: + if f_unhandled.f_back is None and not force_only_unhandled_tracer: + # Happens when we attach to a running program (cannot reuse instance because it's mutable). + top_level_thread_tracer = TopLevelThreadTracerNoBackFrame(ThreadTracer(args), args) + additional_info.top_level_thread_tracer_no_back_frames.append(top_level_thread_tracer) # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + else: + top_level_thread_tracer = additional_info.top_level_thread_tracer_unhandled + if top_level_thread_tracer is None: + # Stop in some internal place to report about unhandled exceptions + top_level_thread_tracer = TopLevelThreadTracerOnlyUnhandledExceptions(args) + additional_info.top_level_thread_tracer_unhandled = top_level_thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + + # print(' --> found to trace unhandled', f_unhandled.f_code.co_name, f_unhandled.f_code.co_filename, f_unhandled.f_code.co_firstlineno) + f_trace = top_level_thread_tracer.get_trace_dispatch_func() + # IFDEF CYTHON + # f_trace = SafeCallWrapper(f_trace) + # ENDIF + f_unhandled.f_trace = f_trace + + if frame is f_unhandled: + return f_trace, False + + thread_tracer = additional_info.thread_tracer + if thread_tracer is None or thread_tracer._args[0] is not py_db: + thread_tracer = ThreadTracer(args) + additional_info.thread_tracer = thread_tracer + +# IFDEF CYTHON +# return SafeCallWrapper(thread_tracer), True +# ELSE + return thread_tracer, True +# ENDIF + + +def trace_dispatch(py_db, frame, event, arg): + thread_trace_func, apply_to_settrace = py_db.fix_top_level_trace_and_get_trace_func(py_db, frame) + if thread_trace_func is None: + return None if event == 'call' else NO_FTRACE + if apply_to_settrace: + py_db.enable_tracing(thread_trace_func) + return thread_trace_func(frame, event, arg) + + +# IFDEF CYTHON +# cdef class TopLevelThreadTracerOnlyUnhandledExceptions: +# cdef public tuple _args; +# def __init__(self, tuple args): +# self._args = args +# ELSE +class TopLevelThreadTracerOnlyUnhandledExceptions(object): + + def __init__(self, args): + self._args = args +# ENDIF + + def trace_unhandled_exceptions(self, frame, event, arg): + # Note that we ignore the frame as this tracing method should only be put in topmost frames already. + # print('trace_unhandled_exceptions', event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno) + if event == 'exception' and arg is not None: + py_db, t, additional_info = self._args[0:3] + if arg is not None: + if not additional_info.suspended_at_unhandled: + additional_info.suspended_at_unhandled = True + + py_db.stop_on_unhandled_exception(py_db, t, additional_info, arg) + + # No need to reset frame.f_trace to keep the same trace function. 
+ return self.trace_unhandled_exceptions + + def get_trace_dispatch_func(self): + return self.trace_unhandled_exceptions + + +# IFDEF CYTHON +# cdef class TopLevelThreadTracerNoBackFrame: +# +# cdef public object _frame_trace_dispatch; +# cdef public tuple _args; +# cdef public object try_except_infos; +# cdef public object _last_exc_arg; +# cdef public set _raise_lines; +# cdef public int _last_raise_line; +# +# def __init__(self, frame_trace_dispatch, tuple args): +# self._frame_trace_dispatch = frame_trace_dispatch +# self._args = args +# self.try_except_infos = None +# self._last_exc_arg = None +# self._raise_lines = set() +# self._last_raise_line = -1 +# ELSE +class TopLevelThreadTracerNoBackFrame(object): + ''' + This tracer is pretty special in that it's dealing with a frame without f_back (i.e.: top frame + on remote attach or QThread). + + This means that we have to carefully inspect exceptions to discover whether the exception will + be unhandled or not (if we're dealing with an unhandled exception we need to stop as unhandled, + otherwise we need to use the regular tracer -- unfortunately the debugger has little info to + work with in the tracing -- see: https://bugs.python.org/issue34099, so, we inspect bytecode to + determine if some exception will be traced or not... note that if this is not available -- such + as on Jython -- we consider any top-level exception to be unnhandled). + ''' + + def __init__(self, frame_trace_dispatch, args): + self._frame_trace_dispatch = frame_trace_dispatch + self._args = args + self.try_except_infos = None + self._last_exc_arg = None + self._raise_lines = set() + self._last_raise_line = -1 +# ENDIF + + def trace_dispatch_and_unhandled_exceptions(self, frame, event, arg): + # DEBUG = 'code_to_debug' in frame.f_code.co_filename + # if DEBUG: print('trace_dispatch_and_unhandled_exceptions: %s %s %s %s %s %s' % (event, frame.f_code.co_name, frame.f_code.co_filename, frame.f_code.co_firstlineno, self._frame_trace_dispatch, frame.f_lineno)) + frame_trace_dispatch = self._frame_trace_dispatch + if frame_trace_dispatch is not None: + self._frame_trace_dispatch = frame_trace_dispatch(frame, event, arg) + + if event == 'exception': + self._last_exc_arg = arg + self._raise_lines.add(frame.f_lineno) + self._last_raise_line = frame.f_lineno + + elif event == 'return' and self._last_exc_arg is not None: + # For unhandled exceptions we actually track the return when at the topmost level. + try: + py_db, t, additional_info = self._args[0:3] + if not additional_info.suspended_at_unhandled: # Note: only check it here, don't set. + if is_unhandled_exception(self, py_db, frame, self._last_raise_line, self._raise_lines): + py_db.stop_on_unhandled_exception(py_db, t, additional_info, self._last_exc_arg) + finally: + # Remove reference to exception after handling it. + self._last_exc_arg = None + + ret = self.trace_dispatch_and_unhandled_exceptions + + # Need to reset (the call to _frame_trace_dispatch may have changed it). + # IFDEF CYTHON + # frame.f_trace = SafeCallWrapper(ret) + # ELSE + frame.f_trace = ret + # ENDIF + return ret + + def get_trace_dispatch_func(self): + return self.trace_dispatch_and_unhandled_exceptions + + +# IFDEF CYTHON +# cdef class ThreadTracer: +# cdef public tuple _args; +# def __init__(self, tuple args): +# self._args = args +# ELSE +class ThreadTracer(object): + + def __init__(self, args): + self._args = args +# ENDIF + + def __call__(self, frame, event, arg): + ''' This is the callback used when we enter some context in the debugger. 
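# --- illustrative aside, not part of the vendored pydevd sources ---
# Minimal sketch of the sys.settrace contract that ThreadTracer.__call__
# implements: the global trace function is invoked for 'call' events and
# returns either a local trace function (keep tracing that frame) or None
# (skip the frame entirely, which is what pydevd's skip caches aim for).
import sys

def _local_trace_sketch(frame, event, arg):
    if event == 'line':
        print('line %s in %s' % (frame.f_lineno, frame.f_code.co_name))
    return _local_trace_sketch

def _global_trace_sketch(frame, event, arg):
    if event != 'call':
        return None
    if frame.f_code.co_name.startswith('_skip'):
        return None                    # cheap filter: do not trace this frame
    return _local_trace_sketch         # trace line events inside this frame

# Example:
def _skip_me_sketch():
    return 1 + 1

def _trace_me_sketch():
    return _skip_me_sketch() + 1

sys.settrace(_global_trace_sketch)
_trace_me_sketch()
sys.settrace(None)
# --- end of aside ---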
+ + We also decorate the thread we are in with info about the debugging. + The attributes added are: + pydev_state + pydev_step_stop + pydev_step_cmd + pydev_notify_kill + + :param PyDB py_db: + This is the global debugger (this method should actually be added as a method to it). + ''' + # IFDEF CYTHON + # cdef str filename; + # cdef str base; + # cdef int pydev_step_cmd; + # cdef object frame_cache_key; + # cdef dict cache_skips; + # cdef bint is_stepping; + # cdef tuple abs_path_canonical_path_and_base; + # cdef PyDBAdditionalThreadInfo additional_info; + # ENDIF + + # DEBUG = 'code_to_debug' in frame.f_code.co_filename + # if DEBUG: print('ENTER: trace_dispatch: %s %s %s %s' % (frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)) + py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + if additional_info.is_tracing: + return None if event == 'call' else NO_FTRACE # we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + + additional_info.is_tracing += 1 + try: + pydev_step_cmd = additional_info.pydev_step_cmd + is_stepping = pydev_step_cmd != -1 + if py_db.pydb_disposed: + return None if event == 'call' else NO_FTRACE + + # if thread is not alive, cancel trace_dispatch processing + if not is_thread_alive(t): + py_db.notify_thread_not_alive(get_current_thread_id(t)) + return None if event == 'call' else NO_FTRACE + + # Note: it's important that the context name is also given because we may hit something once + # in the global context and another in the local context. + frame_cache_key = frame.f_code + if frame_cache_key in cache_skips: + if not is_stepping: + # if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + else: + # When stepping we can't take into account caching based on the breakpoints (only global filtering). + if cache_skips.get(frame_cache_key) == 1: + + if additional_info.pydev_original_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE) and not _global_notify_skipped_step_in: + notify_skipped_step_in_because_of_filters(py_db, frame) + + back_frame = frame.f_back + if back_frame is not None and pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE): + back_frame_cache_key = back_frame.f_code + if cache_skips.get(back_frame_cache_key) == 1: + # if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + else: + # if DEBUG: print('skipped: trace_dispatch (cache hit: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + + try: + # Make fast path faster! 
+ abs_path_canonical_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_canonical_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + + file_type = py_db.get_file_type(frame, abs_path_canonical_path_and_base) # we don't want to debug threading or anything related to pydevd + + if file_type is not None: + if file_type == 1: # inlining LIB_FILE = 1 + if not py_db.in_project_scope(frame, abs_path_canonical_path_and_base[0]): + # if DEBUG: print('skipped: trace_dispatch (not in scope)', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[frame_cache_key] = 1 + return None if event == 'call' else NO_FTRACE + else: + # if DEBUG: print('skipped: trace_dispatch', abs_path_canonical_path_and_base[2], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[frame_cache_key] = 1 + return None if event == 'call' else NO_FTRACE + + if py_db.is_files_filter_enabled: + if py_db.apply_files_filter(frame, abs_path_canonical_path_and_base[0], False): + cache_skips[frame_cache_key] = 1 + + if is_stepping and additional_info.pydev_original_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE) and not _global_notify_skipped_step_in: + notify_skipped_step_in_because_of_filters(py_db, frame) + + # A little gotcha, sometimes when we're stepping in we have to stop in a + # return event showing the back frame as the current frame, so, we need + # to check not only the current frame but the back frame too. + back_frame = frame.f_back + if back_frame is not None and pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE): + if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False): + back_frame_cache_key = back_frame.f_code + cache_skips[back_frame_cache_key] = 1 + # if DEBUG: print('skipped: trace_dispatch (filtered out: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + else: + # if DEBUG: print('skipped: trace_dispatch (filtered out: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None if event == 'call' else NO_FTRACE + + # if DEBUG: print('trace_dispatch', filename, frame.f_lineno, event, frame.f_code.co_name, file_type) + + # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + # reference to the frame). + ret = PyDBFrame( + ( + py_db, abs_path_canonical_path_and_base, additional_info, t, frame_skips_cache, frame_cache_key, + ) + ).trace_dispatch(frame, event, arg) + if ret is None: + # 1 means skipped because of filters. + # 2 means skipped because no breakpoints were hit. + cache_skips[frame_cache_key] = 2 + return None if event == 'call' else NO_FTRACE + + # IFDEF CYTHON + # frame.f_trace = SafeCallWrapper(ret) # Make sure we keep the returned tracer. + # ELSE + frame.f_trace = ret # Make sure we keep the returned tracer. + # ENDIF + return ret + + except SystemExit: + return None if event == 'call' else NO_FTRACE + + except Exception: + if py_db.pydb_disposed: + return None if event == 'call' else NO_FTRACE # Don't log errors when we're shutting down. + # Log it + try: + if pydev_log_exception is not None: + # This can actually happen during the interpreter shutdown in Python 2.7 + pydev_log_exception() + except: + # Error logging? We're really in the interpreter shutdown... 
+ # (https://github.com/fabioz/PyDev.Debugger/issues/8) + pass + return None if event == 'call' else NO_FTRACE + finally: + additional_info.is_tracing -= 1 + + +if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: + # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + # may live longer... as IronPython is garbage-collected, things should live longer anyways, so, it + # shouldn't be an issue as big as it's in CPython -- it may still be annoying, but this should + # be a reasonable workaround until IronPython itself is able to provide that functionality). + # + # See: https://github.com/IronLanguages/main/issues/1630 + from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame + + _original_call = ThreadTracer.__call__ + + def __call__(self, frame, event, arg): + constructed_tid_to_last_frame[self._args[1].ident] = frame + return _original_call(self, frame, event, arg) + + ThreadTracer.__call__ = __call__ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py new file mode 100644 index 0000000..2f38e4b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py @@ -0,0 +1,92 @@ +'''For debug purpose we are replacing actual builtin property by the debug property +''' +from _pydevd_bundle.pydevd_comm import get_global_debugger +from _pydev_bundle import pydev_log + + +#======================================================================================================================= +# replace_builtin_property +#======================================================================================================================= +def replace_builtin_property(new_property=None): + if new_property is None: + new_property = DebugProperty + original = property + try: + import builtins + builtins.__dict__['property'] = new_property + except: + pydev_log.exception() # @Reimport + return original + + +#======================================================================================================================= +# DebugProperty +#======================================================================================================================= +class DebugProperty(object): + """A custom property which allows python property to get + controlled by the debugger and selectively disable/re-enable + the tracing. 
+ """ + + def __init__(self, fget=None, fset=None, fdel=None, doc=None): + self.fget = fget + self.fset = fset + self.fdel = fdel + self.__doc__ = doc + + def __get__(self, obj, objtype=None): + if obj is None: + return self + global_debugger = get_global_debugger() + try: + if global_debugger is not None and global_debugger.disable_property_getter_trace: + global_debugger.disable_tracing() + if self.fget is None: + raise AttributeError("unreadable attribute") + return self.fget(obj) + finally: + if global_debugger is not None: + global_debugger.enable_tracing() + + def __set__(self, obj, value): + global_debugger = get_global_debugger() + try: + if global_debugger is not None and global_debugger.disable_property_setter_trace: + global_debugger.disable_tracing() + if self.fset is None: + raise AttributeError("can't set attribute") + self.fset(obj, value) + finally: + if global_debugger is not None: + global_debugger.enable_tracing() + + def __delete__(self, obj): + global_debugger = get_global_debugger() + try: + if global_debugger is not None and global_debugger.disable_property_deleter_trace: + global_debugger.disable_tracing() + if self.fdel is None: + raise AttributeError("can't delete attribute") + self.fdel(obj) + finally: + if global_debugger is not None: + global_debugger.enable_tracing() + + def getter(self, fget): + """Overriding getter decorator for the property + """ + self.fget = fget + return self + + def setter(self, fset): + """Overriding setter decorator for the property + """ + self.fset = fset + return self + + def deleter(self, fdel): + """Overriding deleter decorator for the property + """ + self.fdel = fdel + return self + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_utils.py new file mode 100644 index 0000000..a45caa1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_utils.py @@ -0,0 +1,511 @@ +from __future__ import nested_scopes +import traceback +import warnings +from _pydev_bundle import pydev_log +from _pydev_bundle._pydev_saved_modules import thread, threading +from _pydev_bundle import _pydev_saved_modules +import signal +import os +import ctypes +from importlib import import_module +from urllib.parse import quote # @UnresolvedImport +import time +import inspect +import sys +from _pydevd_bundle.pydevd_constants import USE_CUSTOM_SYS_CURRENT_FRAMES, IS_PYPY, SUPPORT_GEVENT, \ + GEVENT_SUPPORT_NOT_SET_MSG, GENERATED_LEN_ATTR_NAME, PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT, \ + get_global_debugger + + +def save_main_module(file, module_name): + # patch provided by: Scott Schlesier - when script is run, it does not + # use globals from pydevd: + # This will prevent the pydevd script from contaminating the namespace for the script to be debugged + # pretend pydevd is not the main module, and + # convince the file to be debugged that it was loaded as main + sys.modules[module_name] = sys.modules['__main__'] + sys.modules[module_name].__name__ = module_name + + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + warnings.simplefilter("ignore", category=PendingDeprecationWarning) + from imp import new_module + + m = new_module('__main__') + sys.modules['__main__'] = m + if hasattr(sys.modules[module_name], '__loader__'): + m.__loader__ = getattr(sys.modules[module_name], '__loader__') + m.__file__ = file + + return m + + +def 
is_current_thread_main_thread(): + if hasattr(threading, 'main_thread'): + return threading.current_thread() is threading.main_thread() + else: + return isinstance(threading.current_thread(), threading._MainThread) + + +def get_main_thread(): + if hasattr(threading, 'main_thread'): + return threading.main_thread() + else: + for t in threading.enumerate(): + if isinstance(t, threading._MainThread): + return t + return None + + +def to_number(x): + if is_string(x): + try: + n = float(x) + return n + except ValueError: + pass + + l = x.find('(') + if l != -1: + y = x[0:l - 1] + # print y + try: + n = float(y) + return n + except ValueError: + pass + return None + + +def compare_object_attrs_key(x): + if GENERATED_LEN_ATTR_NAME == x: + as_number = to_number(x) + if as_number is None: + as_number = 99999999 + # len() should appear after other attributes in a list. + return (1, as_number) + else: + return (-1, to_string(x)) + + +def is_string(x): + return isinstance(x, str) + + +def to_string(x): + if isinstance(x, str): + return x + else: + return str(x) + + +def print_exc(): + if traceback: + traceback.print_exc() + + +def quote_smart(s, safe='/'): + return quote(s, safe) + + +def get_clsname_for_code(code, frame): + clsname = None + if len(code.co_varnames) > 0: + # We are checking the first argument of the function + # (`self` or `cls` for methods). + first_arg_name = code.co_varnames[0] + if first_arg_name in frame.f_locals: + first_arg_obj = frame.f_locals[first_arg_name] + if inspect.isclass(first_arg_obj): # class method + first_arg_class = first_arg_obj + else: # instance method + if hasattr(first_arg_obj, "__class__"): + first_arg_class = first_arg_obj.__class__ + else: # old style class, fall back on type + first_arg_class = type(first_arg_obj) + func_name = code.co_name + if hasattr(first_arg_class, func_name): + method = getattr(first_arg_class, func_name) + func_code = None + if hasattr(method, 'func_code'): # Python2 + func_code = method.func_code + elif hasattr(method, '__code__'): # Python3 + func_code = method.__code__ + if func_code and func_code == code: + clsname = first_arg_class.__name__ + + return clsname + + +def get_non_pydevd_threads(): + threads = threading.enumerate() + return [t for t in threads if t and not getattr(t, 'is_pydev_daemon_thread', False)] + + +if USE_CUSTOM_SYS_CURRENT_FRAMES and IS_PYPY: + # On PyPy we can use its fake_frames to get the traceback + # (instead of the actual real frames that need the tracing to be correct). + _tid_to_frame_for_dump_threads = sys._current_frames +else: + from _pydevd_bundle.pydevd_constants import _current_frames as _tid_to_frame_for_dump_threads + + +def dump_threads(stream=None, show_pydevd_threads=True): + ''' + Helper to dump thread info. 
+ ''' + if stream is None: + stream = sys.stderr + thread_id_to_name_and_is_pydevd_thread = {} + try: + threading_enumerate = _pydev_saved_modules.pydevd_saved_threading_enumerate + if threading_enumerate is None: + threading_enumerate = threading.enumerate + + for t in threading_enumerate(): + is_pydevd_thread = getattr(t, 'is_pydev_daemon_thread', False) + thread_id_to_name_and_is_pydevd_thread[t.ident] = ( + '%s (daemon: %s, pydevd thread: %s)' % (t.name, t.daemon, is_pydevd_thread), + is_pydevd_thread + ) + except: + pass + + stream.write('===============================================================================\n') + stream.write('Threads running\n') + stream.write('================================= Thread Dump =================================\n') + stream.flush() + + for thread_id, frame in _tid_to_frame_for_dump_threads().items(): + name, is_pydevd_thread = thread_id_to_name_and_is_pydevd_thread.get(thread_id, (thread_id, False)) + if not show_pydevd_threads and is_pydevd_thread: + continue + + stream.write('\n-------------------------------------------------------------------------------\n') + stream.write(" Thread %s" % (name,)) + stream.write('\n\n') + + for i, (filename, lineno, name, line) in enumerate(traceback.extract_stack(frame)): + + stream.write(' File "%s", line %d, in %s\n' % (filename, lineno, name)) + if line: + stream.write(" %s\n" % (line.strip())) + + if i == 0 and 'self' in frame.f_locals: + stream.write(' self: ') + try: + stream.write(str(frame.f_locals['self'])) + except: + stream.write('Unable to get str of: %s' % (type(frame.f_locals['self']),)) + stream.write('\n') + stream.flush() + + stream.write('\n=============================== END Thread Dump ===============================') + stream.flush() + + +def _extract_variable_nested_braces(char_iter): + expression = [] + level = 0 + for c in char_iter: + if c == '{': + level += 1 + if c == '}': + level -= 1 + if level == -1: + return ''.join(expression).strip() + expression.append(c) + raise SyntaxError('Unbalanced braces in expression.') + + +def _extract_expression_list(log_message): + # Note: not using re because of nested braces. + expression = [] + expression_vars = [] + char_iter = iter(log_message) + for c in char_iter: + if c == '{': + expression_var = _extract_variable_nested_braces(char_iter) + if expression_var: + expression.append('%s') + expression_vars.append(expression_var) + else: + expression.append(c) + + expression = ''.join(expression) + return expression, expression_vars + + +def convert_dap_log_message_to_expression(log_message): + try: + expression, expression_vars = _extract_expression_list(log_message) + except SyntaxError: + return repr('Unbalanced braces in: %s' % (log_message)) + if not expression_vars: + return repr(expression) + # Note: use '%' to be compatible with Python 2.6. + return repr(expression) + ' % (' + ', '.join(str(x) for x in expression_vars) + ',)' + + +def notify_about_gevent_if_needed(stream=None): + ''' + When debugging with gevent check that the gevent flag is used if the user uses the gevent + monkey-patching. + + :return bool: + Returns True if a message had to be shown to the user and False otherwise. 
+ ''' + stream = stream if stream is not None else sys.stderr + if not SUPPORT_GEVENT: + gevent_monkey = sys.modules.get('gevent.monkey') + if gevent_monkey is not None: + try: + saved = gevent_monkey.saved + except AttributeError: + pydev_log.exception_once('Error checking for gevent monkey-patching.') + return False + + if saved: + # Note: print to stderr as it may deadlock the debugger. + sys.stderr.write('%s\n' % (GEVENT_SUPPORT_NOT_SET_MSG,)) + return True + + return False + + +def hasattr_checked(obj, name): + try: + getattr(obj, name) + except: + # i.e.: Handle any exception, not only AttributeError. + return False + else: + return True + + +def getattr_checked(obj, name): + try: + return getattr(obj, name) + except: + # i.e.: Handle any exception, not only AttributeError. + return None + + +def dir_checked(obj): + try: + return dir(obj) + except: + return [] + + +def isinstance_checked(obj, cls): + try: + return isinstance(obj, cls) + except: + return False + + +class ScopeRequest(object): + + __slots__ = ['variable_reference', 'scope'] + + def __init__(self, variable_reference, scope): + assert scope in ('globals', 'locals') + self.variable_reference = variable_reference + self.scope = scope + + def __eq__(self, o): + if isinstance(o, ScopeRequest): + return self.variable_reference == o.variable_reference and self.scope == o.scope + + return False + + def __ne__(self, o): + return not self == o + + def __hash__(self): + return hash((self.variable_reference, self.scope)) + + +class DAPGrouper(object): + ''' + Note: this is a helper class to group variables on the debug adapter protocol (DAP). For + the xml protocol the type is just added to each variable and the UI can group/hide it as needed. + ''' + + SCOPE_SPECIAL_VARS = 'special variables' + SCOPE_PROTECTED_VARS = 'protected variables' + SCOPE_FUNCTION_VARS = 'function variables' + SCOPE_CLASS_VARS = 'class variables' + + SCOPES_SORTED = [ + SCOPE_SPECIAL_VARS, + SCOPE_PROTECTED_VARS, + SCOPE_FUNCTION_VARS, + SCOPE_CLASS_VARS, + ] + + __slots__ = ['variable_reference', 'scope', 'contents_debug_adapter_protocol'] + + def __init__(self, scope): + self.variable_reference = id(self) + self.scope = scope + self.contents_debug_adapter_protocol = [] + + def get_contents_debug_adapter_protocol(self): + return self.contents_debug_adapter_protocol[:] + + def __eq__(self, o): + if isinstance(o, ScopeRequest): + return self.variable_reference == o.variable_reference and self.scope == o.scope + + return False + + def __ne__(self, o): + return not self == o + + def __hash__(self): + return hash((self.variable_reference, self.scope)) + + def __repr__(self): + return '' + + def __str__(self): + return '' + + +def interrupt_main_thread(main_thread): + ''' + Generates a KeyboardInterrupt in the main thread by sending a Ctrl+C + or by calling thread.interrupt_main(). + + :param main_thread: + Needed because Jython needs main_thread._thread.interrupt() to be called. + + Note: if unable to send a Ctrl+C, the KeyboardInterrupt will only be raised + when the next Python instruction is about to be executed (so, it won't interrupt + a sleep(1000)). + ''' + pydev_log.debug('Interrupt main thread.') + called = False + try: + if os.name == 'posix': + # On Linux we can't interrupt 0 as in Windows because it's + # actually owned by a process -- on the good side, signals + # work much better on Linux! 
+ os.kill(os.getpid(), signal.SIGINT) + called = True + + elif os.name == 'nt': + # This generates a Ctrl+C only for the current process and not + # to the process group! + # Note: there doesn't seem to be any public documentation for this + # function (although it seems to be present from Windows Server 2003 SP1 onwards + # according to: https://www.geoffchappell.com/studies/windows/win32/kernel32/api/index.htm) + ctypes.windll.kernel32.CtrlRoutine(0) + + # The code below is deprecated because it actually sends a Ctrl+C + # to the process group, so, if this was a process created without + # passing `CREATE_NEW_PROCESS_GROUP` the signal may be sent to the + # parent process and to sub-processes too (which is not ideal -- + # for instance, when using pytest-xdist, it'll actually stop the + # testing, even when called in the subprocess). + + # if hasattr_checked(signal, 'CTRL_C_EVENT'): + # os.kill(0, signal.CTRL_C_EVENT) + # else: + # # Python 2.6 + # ctypes.windll.kernel32.GenerateConsoleCtrlEvent(0, 0) + called = True + + except: + # If something went wrong, fallback to interrupting when the next + # Python instruction is being called. + pydev_log.exception('Error interrupting main thread (using fallback).') + + if not called: + try: + # In this case, we don't really interrupt a sleep() nor IO operations + # (this makes the KeyboardInterrupt be sent only when the next Python + # instruction is about to be executed). + if hasattr(thread, 'interrupt_main'): + thread.interrupt_main() + else: + main_thread._thread.interrupt() # Jython + except: + pydev_log.exception('Error on interrupt main thread fallback.') + + +class Timer(object): + + def __init__(self, min_diff=PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT): + self.min_diff = min_diff + self._curr_time = time.time() + + def print_time(self, msg='Elapsed:'): + old = self._curr_time + new = self._curr_time = time.time() + diff = new - old + if diff >= self.min_diff: + print('%s: %.2fs' % (msg, diff)) + + def _report_slow(self, compute_msg, *args): + old = self._curr_time + new = self._curr_time = time.time() + diff = new - old + if diff >= self.min_diff: + py_db = get_global_debugger() + if py_db is not None: + msg = compute_msg(diff, *args) + py_db.writer.add_command(py_db.cmd_factory.make_warning_message(msg)) + + def report_if_compute_repr_attr_slow(self, attrs_tab_separated, attr_name, attr_type): + self._report_slow(self._compute_repr_slow, attrs_tab_separated, attr_name, attr_type) + + def _compute_repr_slow(self, diff, attrs_tab_separated, attr_name, attr_type): + try: + attr_type = attr_type.__name__ + except: + pass + if attrs_tab_separated: + return 'pydevd warning: Computing repr of %s.%s (%s) was slow (took %.2fs)\n' % ( + attrs_tab_separated.replace('\t', '.'), attr_name, attr_type, diff) + else: + return 'pydevd warning: Computing repr of %s (%s) was slow (took %.2fs)\n' % ( + attr_name, attr_type, diff) + + def report_if_getting_attr_slow(self, cls, attr_name): + self._report_slow(self._compute_get_attr_slow, cls, attr_name) + + def _compute_get_attr_slow(self, diff, cls, attr_name): + try: + cls = cls.__name__ + except: + pass + return 'pydevd warning: Getting attribute %s.%s was slow (took %.2fs)\n' % (cls, attr_name, diff) + + +def import_attr_from_module(import_with_attr_access): + if '.' not in import_with_attr_access: + # We need at least one '.' (we don't support just the module import, we need the attribute access too). 
+ raise ImportError('Unable to import module with attr access: %s' % (import_with_attr_access,)) + + module_name, attr_name = import_with_attr_access.rsplit('.', 1) + + while True: + try: + mod = import_module(module_name) + except ImportError: + if '.' not in module_name: + raise ImportError('Unable to import module with attr access: %s' % (import_with_attr_access,)) + + module_name, new_attr_part = module_name.rsplit('.', 1) + attr_name = new_attr_part + '.' + attr_name + else: + # Ok, we got the base module, now, get the attribute we need. + try: + for attr in attr_name.split('.'): + mod = getattr(mod, attr) + return mod + except: + raise ImportError('Unable to import module with attr access: %s' % (import_with_attr_access,)) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vars.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vars.py new file mode 100644 index 0000000..8010093 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vars.py @@ -0,0 +1,839 @@ +""" pydevd_vars deals with variables: + resolution/conversion to XML. +""" +import pickle +from _pydevd_bundle.pydevd_constants import get_frame, get_current_thread_id, \ + iter_chars, silence_warnings_decorator, get_global_debugger + +from _pydevd_bundle.pydevd_xml import ExceptionOnEvaluate, get_type, var_to_xml +from _pydev_bundle import pydev_log +import functools +from _pydevd_bundle.pydevd_thread_lifecycle import resume_threads, mark_thread_suspended, suspend_all_threads +from _pydevd_bundle.pydevd_comm_constants import CMD_SET_BREAK + +import sys # @Reimport + +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle import pydevd_save_locals, pydevd_timeout, pydevd_constants +from _pydev_bundle.pydev_imports import Exec, execfile +from _pydevd_bundle.pydevd_utils import to_string +import inspect +from _pydevd_bundle.pydevd_daemon_thread import PyDBDaemonThread +from _pydevd_bundle.pydevd_save_locals import update_globals_and_locals +from functools import lru_cache + +SENTINEL_VALUE = [] + + +class VariableError(RuntimeError): + pass + + +def iter_frames(frame): + while frame is not None: + yield frame + frame = frame.f_back + frame = None + + +def dump_frames(thread_id): + sys.stdout.write('dumping frames\n') + if thread_id != get_current_thread_id(threading.current_thread()): + raise VariableError("find_frame: must execute on same thread") + + frame = get_frame() + for frame in iter_frames(frame): + sys.stdout.write('%s\n' % pickle.dumps(frame)) + + +@silence_warnings_decorator +def getVariable(dbg, thread_id, frame_id, scope, attrs): + """ + returns the value of a variable + + :scope: can be BY_ID, EXPRESSION, GLOBAL, LOCAL, FRAME + + BY_ID means we'll traverse the list of all objects alive to get the object. + + :attrs: after reaching the proper scope, we have to get the attributes until we find + the proper location (i.e.: obj\tattr1\tattr2) + + :note: when BY_ID is used, the frame_id is considered the id of the object to find and + not the frame (as we don't care about the frame in this case). + """ + if scope == 'BY_ID': + if thread_id != get_current_thread_id(threading.current_thread()): + raise VariableError("getVariable: must execute on same thread") + + try: + import gc + objects = gc.get_objects() + except: + pass # Not all python variants have it. 
+ else: + frame_id = int(frame_id) + for var in objects: + if id(var) == frame_id: + if attrs is not None: + attrList = attrs.split('\t') + for k in attrList: + _type, _type_name, resolver = get_type(var) + var = resolver.resolve(var, k) + + return var + + # If it didn't return previously, we coudn't find it by id (i.e.: alrceady garbage collected). + sys.stderr.write('Unable to find object with id: %s\n' % (frame_id,)) + return None + + frame = dbg.find_frame(thread_id, frame_id) + if frame is None: + return {} + + if attrs is not None: + attrList = attrs.split('\t') + else: + attrList = [] + + for attr in attrList: + attr.replace("@_@TAB_CHAR@_@", '\t') + + if scope == 'EXPRESSION': + for count in range(len(attrList)): + if count == 0: + # An Expression can be in any scope (globals/locals), therefore it needs to evaluated as an expression + var = evaluate_expression(dbg, frame, attrList[count], False) + else: + _type, _type_name, resolver = get_type(var) + var = resolver.resolve(var, attrList[count]) + else: + if scope == "GLOBAL": + var = frame.f_globals + del attrList[0] # globals are special, and they get a single dummy unused attribute + else: + # in a frame access both locals and globals as Python does + var = {} + var.update(frame.f_globals) + var.update(frame.f_locals) + + for k in attrList: + _type, _type_name, resolver = get_type(var) + var = resolver.resolve(var, k) + + return var + + +def resolve_compound_variable_fields(dbg, thread_id, frame_id, scope, attrs): + """ + Resolve compound variable in debugger scopes by its name and attributes + + :param thread_id: id of the variable's thread + :param frame_id: id of the variable's frame + :param scope: can be BY_ID, EXPRESSION, GLOBAL, LOCAL, FRAME + :param attrs: after reaching the proper scope, we have to get the attributes until we find + the proper location (i.e.: obj\tattr1\tattr2) + :return: a dictionary of variables's fields + """ + + var = getVariable(dbg, thread_id, frame_id, scope, attrs) + + try: + _type, type_name, resolver = get_type(var) + return type_name, resolver.get_dictionary(var) + except: + pydev_log.exception('Error evaluating: thread_id: %s\nframe_id: %s\nscope: %s\nattrs: %s.', + thread_id, frame_id, scope, attrs) + + +def resolve_var_object(var, attrs): + """ + Resolve variable's attribute + + :param var: an object of variable + :param attrs: a sequence of variable's attributes separated by \t (i.e.: obj\tattr1\tattr2) + :return: a value of resolved variable's attribute + """ + if attrs is not None: + attr_list = attrs.split('\t') + else: + attr_list = [] + for k in attr_list: + type, _type_name, resolver = get_type(var) + var = resolver.resolve(var, k) + return var + + +def resolve_compound_var_object_fields(var, attrs): + """ + Resolve compound variable by its object and attributes + + :param var: an object of variable + :param attrs: a sequence of variable's attributes separated by \t (i.e.: obj\tattr1\tattr2) + :return: a dictionary of variables's fields + """ + attr_list = attrs.split('\t') + + for k in attr_list: + type, _type_name, resolver = get_type(var) + var = resolver.resolve(var, k) + + try: + type, _type_name, resolver = get_type(var) + return resolver.get_dictionary(var) + except: + pydev_log.exception() + + +def custom_operation(dbg, thread_id, frame_id, scope, attrs, style, code_or_file, operation_fn_name): + """ + We'll execute the code_or_file and then search in the namespace the operation_fn_name to execute with the given var. 
+ + code_or_file: either some code (i.e.: from pprint import pprint) or a file to be executed. + operation_fn_name: the name of the operation to execute after the exec (i.e.: pprint) + """ + expressionValue = getVariable(dbg, thread_id, frame_id, scope, attrs) + + try: + namespace = {'__name__': ''} + if style == "EXECFILE": + namespace['__file__'] = code_or_file + execfile(code_or_file, namespace, namespace) + else: # style == EXEC + namespace['__file__'] = '' + Exec(code_or_file, namespace, namespace) + + return str(namespace[operation_fn_name](expressionValue)) + except: + pydev_log.exception() + + +@lru_cache(3) +def _expression_to_evaluate(expression): + keepends = True + lines = expression.splitlines(keepends) + # find first non-empty line + chars_to_strip = 0 + for line in lines: + if line.strip(): # i.e.: check first non-empty line + for c in iter_chars(line): + if c.isspace(): + chars_to_strip += 1 + else: + break + break + + if chars_to_strip: + # I.e.: check that the chars we'll remove are really only whitespaces. + proceed = True + new_lines = [] + for line in lines: + if not proceed: + break + for c in iter_chars(line[:chars_to_strip]): + if not c.isspace(): + proceed = False + break + + new_lines.append(line[chars_to_strip:]) + + if proceed: + if isinstance(expression, bytes): + expression = b''.join(new_lines) + else: + expression = u''.join(new_lines) + + return expression + + +def eval_in_context(expression, global_vars, local_vars, py_db=None): + result = None + try: + compiled = compile_as_eval(expression) + is_async = inspect.CO_COROUTINE & compiled.co_flags == inspect.CO_COROUTINE + + if is_async: + if py_db is None: + py_db = get_global_debugger() + if py_db is None: + raise RuntimeError('Cannot evaluate async without py_db.') + t = _EvalAwaitInNewEventLoop(py_db, compiled, global_vars, local_vars) + t.start() + t.join() + + if t.exc: + raise t.exc[1].with_traceback(t.exc[2]) + else: + result = t.evaluated_value + else: + result = eval(compiled, global_vars, local_vars) + except (Exception, KeyboardInterrupt): + etype, result, tb = sys.exc_info() + result = ExceptionOnEvaluate(result, etype, tb) + + # Ok, we have the initial error message, but let's see if we're dealing with a name mangling error... + try: + if '.__' in expression: + # Try to handle '__' name mangling (for simple cases such as self.__variable.__another_var). + split = expression.split('.') + entry = split[0] + + if local_vars is None: + local_vars = global_vars + curr = local_vars[entry] # Note: we want the KeyError if it's not there. 
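The fallback above relies on Python's private name mangling: an attribute written as self.__x inside class C is stored as _C__x, so a failed lookup can be retried with the class name prefixed. A minimal standalone illustration (Account and __balance are made-up names):

class Account:
    def __init__(self):
        self.__balance = 10  # Actually stored as _Account__balance.


acct = Account()
obj, attr = acct, '__balance'
if attr.startswith('__') and not hasattr(obj, attr):
    attr = '_%s%s' % (obj.__class__.__name__, attr)  # -> '_Account__balance'
print(getattr(obj, attr))  # 10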
+ for entry in split[1:]: + if entry.startswith('__') and not hasattr(curr, entry): + entry = '_%s%s' % (curr.__class__.__name__, entry) + curr = getattr(curr, entry) + + result = curr + except: + pass + return result + + +def _run_with_interrupt_thread(original_func, py_db, curr_thread, frame, expression, is_exec): + on_interrupt_threads = None + timeout_tracker = py_db.timeout_tracker # : :type timeout_tracker: TimeoutTracker + + interrupt_thread_timeout = pydevd_constants.PYDEVD_INTERRUPT_THREAD_TIMEOUT + + if interrupt_thread_timeout > 0: + on_interrupt_threads = pydevd_timeout.create_interrupt_this_thread_callback() + pydev_log.info('Doing evaluate with interrupt threads timeout: %s.', interrupt_thread_timeout) + + if on_interrupt_threads is None: + return original_func(py_db, frame, expression, is_exec) + else: + with timeout_tracker.call_on_timeout(interrupt_thread_timeout, on_interrupt_threads): + return original_func(py_db, frame, expression, is_exec) + + +def _run_with_unblock_threads(original_func, py_db, curr_thread, frame, expression, is_exec): + on_timeout_unblock_threads = None + timeout_tracker = py_db.timeout_tracker # : :type timeout_tracker: TimeoutTracker + + if py_db.multi_threads_single_notification: + unblock_threads_timeout = pydevd_constants.PYDEVD_UNBLOCK_THREADS_TIMEOUT + else: + unblock_threads_timeout = -1 # Don't use this if threads are managed individually. + + if unblock_threads_timeout >= 0: + pydev_log.info('Doing evaluate with unblock threads timeout: %s.', unblock_threads_timeout) + tid = get_current_thread_id(curr_thread) + + def on_timeout_unblock_threads(): + on_timeout_unblock_threads.called = True + pydev_log.info('Resuming threads after evaluate timeout.') + resume_threads('*', except_thread=curr_thread) + py_db.threads_suspended_single_notification.on_thread_resume(tid) + + on_timeout_unblock_threads.called = False + + try: + if on_timeout_unblock_threads is None: + return _run_with_interrupt_thread(original_func, py_db, curr_thread, frame, expression, is_exec) + else: + with timeout_tracker.call_on_timeout(unblock_threads_timeout, on_timeout_unblock_threads): + return _run_with_interrupt_thread(original_func, py_db, curr_thread, frame, expression, is_exec) + + finally: + if on_timeout_unblock_threads is not None and on_timeout_unblock_threads.called: + mark_thread_suspended(curr_thread, CMD_SET_BREAK) + py_db.threads_suspended_single_notification.increment_suspend_time() + suspend_all_threads(py_db, except_thread=curr_thread) + py_db.threads_suspended_single_notification.on_thread_suspend(tid, CMD_SET_BREAK) + + +def _evaluate_with_timeouts(original_func): + ''' + Provides a decorator that wraps the original evaluate to deal with slow evaluates. + + If some evaluation is too slow, we may show a message, resume threads or interrupt them + as needed (based on the related configurations). + ''' + + @functools.wraps(original_func) + def new_func(py_db, frame, expression, is_exec): + if py_db is None: + # Only for testing... 
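The timeout handling used here ultimately boils down to "run a callback if this block takes too long". A rough standalone sketch of that idea, using threading.Timer rather than pydevd's TimeoutTracker:

import contextlib
import threading
import time


@contextlib.contextmanager
def call_on_timeout(timeout, callback):
    # Run `callback` on a helper thread if the with-block is still running
    # after `timeout` seconds; cancel it if the block finishes in time.
    timer = threading.Timer(timeout, callback)
    timer.daemon = True
    timer.start()
    try:
        yield
    finally:
        timer.cancel()


with call_on_timeout(0.5, lambda: print('evaluation is slow, still working...')):
    time.sleep(1)  # Simulated slow evaluation: the warning fires once after 0.5s.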
+ pydev_log.critical('_evaluate_with_timeouts called without py_db!') + return original_func(py_db, frame, expression, is_exec) + warn_evaluation_timeout = pydevd_constants.PYDEVD_WARN_EVALUATION_TIMEOUT + curr_thread = threading.current_thread() + + def on_warn_evaluation_timeout(): + py_db.writer.add_command(py_db.cmd_factory.make_evaluation_timeout_msg( + py_db, expression, curr_thread)) + + timeout_tracker = py_db.timeout_tracker # : :type timeout_tracker: TimeoutTracker + with timeout_tracker.call_on_timeout(warn_evaluation_timeout, on_warn_evaluation_timeout): + return _run_with_unblock_threads(original_func, py_db, curr_thread, frame, expression, is_exec) + + return new_func + + +_ASYNC_COMPILE_FLAGS = None +try: + from ast import PyCF_ALLOW_TOP_LEVEL_AWAIT + _ASYNC_COMPILE_FLAGS = PyCF_ALLOW_TOP_LEVEL_AWAIT +except: + pass + + +def compile_as_eval(expression): + ''' + + :param expression: + The expression to be _compiled. + + :return: code object + + :raises Exception if the expression cannot be evaluated. + ''' + expression_to_evaluate = _expression_to_evaluate(expression) + if _ASYNC_COMPILE_FLAGS is not None: + return compile(expression_to_evaluate, '', 'eval', _ASYNC_COMPILE_FLAGS) + else: + return compile(expression_to_evaluate, '', 'eval') + + +def _compile_as_exec(expression): + ''' + + :param expression: + The expression to be _compiled. + + :return: code object + + :raises Exception if the expression cannot be evaluated. + ''' + expression_to_evaluate = _expression_to_evaluate(expression) + if _ASYNC_COMPILE_FLAGS is not None: + return compile(expression_to_evaluate, '', 'exec', _ASYNC_COMPILE_FLAGS) + else: + return compile(expression_to_evaluate, '', 'exec') + + +class _EvalAwaitInNewEventLoop(PyDBDaemonThread): + + def __init__(self, py_db, compiled, updated_globals, updated_locals): + PyDBDaemonThread.__init__(self, py_db) + self._compiled = compiled + self._updated_globals = updated_globals + self._updated_locals = updated_locals + + # Output + self.evaluated_value = None + self.exc = None + + async def _async_func(self): + return await eval(self._compiled, self._updated_locals, self._updated_globals) + + def _on_run(self): + try: + import asyncio + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + self.evaluated_value = asyncio.run(self._async_func()) + except: + self.exc = sys.exc_info() + + +@_evaluate_with_timeouts +def evaluate_expression(py_db, frame, expression, is_exec): + ''' + :param str expression: + The expression to be evaluated. + + Note that if the expression is indented it's automatically dedented (based on the indentation + found on the first non-empty line). + + i.e.: something as: + + ` + def method(): + a = 1 + ` + + becomes: + + ` + def method(): + a = 1 + ` + + Also, it's possible to evaluate calls with a top-level await (currently this is done by + creating a new event loop in a new thread and making the evaluate at that thread -- note + that this is still done synchronously so the evaluation has to finish before this + function returns). + + :param is_exec: determines if we should do an exec or an eval. + There are some changes in this function depending on whether it's an exec or an eval. + + When it's an exec (i.e.: is_exec==True): + This function returns None. + Any exception that happens during the evaluation is reraised. + If the expression could actually be evaluated, the variable is printed to the console if not None. + + When it's an eval (i.e.: is_exec==False): + This function returns the result from the evaluation. 
+ If some exception happens in this case, the exception is caught and a ExceptionOnEvaluate is returned. + Also, in this case we try to resolve name-mangling (i.e.: to be able to add a self.__my_var watch). + + :param py_db: + The debugger. Only needed if some top-level await is detected (for creating a + PyDBDaemonThread). + ''' + if frame is None: + return + + # This is very tricky. Some statements can change locals and use them in the same + # call (see https://github.com/microsoft/debugpy/issues/815), also, if locals and globals are + # passed separately, it's possible that one gets updated but apparently Python will still + # try to load from the other, so, what's done is that we merge all in a single dict and + # then go on and update the frame with the results afterwards. + + # -- see tests in test_evaluate_expression.py + + # This doesn't work because the variables aren't updated in the locals in case the + # evaluation tries to set a variable and use it in the same expression. + # updated_globals = frame.f_globals + # updated_locals = frame.f_locals + + # This doesn't work because the variables aren't updated in the locals in case the + # evaluation tries to set a variable and use it in the same expression. + # updated_globals = {} + # updated_globals.update(frame.f_globals) + # updated_globals.update(frame.f_locals) + # + # updated_locals = frame.f_locals + + # This doesn't work either in the case where the evaluation tries to set a variable and use + # it in the same expression (I really don't know why as it seems like this *should* work + # in theory but doesn't in practice). + # updated_globals = {} + # updated_globals.update(frame.f_globals) + # + # updated_locals = {} + # updated_globals.update(frame.f_locals) + + # This is the only case that worked consistently to run the tests in test_evaluate_expression.py + # It's a bit unfortunate because although the exec works in this case, we have to manually + # put the updates in the frame locals afterwards. + updated_globals = {} + updated_globals.update(frame.f_globals) + updated_globals.update(frame.f_locals) + + initial_globals = updated_globals.copy() + + updated_locals = None + + try: + expression = expression.replace('@LINE@', '\n') + + if is_exec: + try: + # Try to make it an eval (if it is an eval we can print it, otherwise we'll exec it and + # it will have whatever the user actually did) + compiled = compile_as_eval(expression) + except Exception: + compiled = None + + if compiled is None: + try: + compiled = _compile_as_exec(expression) + is_async = inspect.CO_COROUTINE & compiled.co_flags == inspect.CO_COROUTINE + if is_async: + t = _EvalAwaitInNewEventLoop(py_db, compiled, updated_globals, updated_locals) + t.start() + t.join() + + if t.exc: + raise t.exc[1].with_traceback(t.exc[2]) + else: + Exec(compiled, updated_globals, updated_locals) + finally: + # Update the globals even if it errored as it may have partially worked. 
+ update_globals_and_locals(updated_globals, initial_globals, frame) + else: + is_async = inspect.CO_COROUTINE & compiled.co_flags == inspect.CO_COROUTINE + if is_async: + t = _EvalAwaitInNewEventLoop(py_db, compiled, updated_globals, updated_locals) + t.start() + t.join() + + if t.exc: + raise t.exc[1].with_traceback(t.exc[2]) + else: + result = t.evaluated_value + else: + result = eval(compiled, updated_globals, updated_locals) + if result is not None: # Only print if it's not None (as python does) + sys.stdout.write('%s\n' % (result,)) + return + + else: + ret = eval_in_context(expression, updated_globals, updated_locals, py_db) + try: + is_exception_returned = ret.__class__ == ExceptionOnEvaluate + except: + pass + else: + if not is_exception_returned: + # i.e.: by using a walrus assignment (:=), expressions can change the locals, + # so, make sure that we save the locals back to the frame. + update_globals_and_locals(updated_globals, initial_globals, frame) + return ret + finally: + # Should not be kept alive if an exception happens and this frame is kept in the stack. + del updated_globals + del updated_locals + del initial_globals + del frame + + +def change_attr_expression(frame, attr, expression, dbg, value=SENTINEL_VALUE): + '''Changes some attribute in a given frame. + ''' + if frame is None: + return + + try: + expression = expression.replace('@LINE@', '\n') + + if dbg.plugin and value is SENTINEL_VALUE: + result = dbg.plugin.change_variable(frame, attr, expression) + if result: + return result + + if attr[:7] == "Globals": + attr = attr[8:] + if attr in frame.f_globals: + if value is SENTINEL_VALUE: + value = eval(expression, frame.f_globals, frame.f_locals) + frame.f_globals[attr] = value + return frame.f_globals[attr] + else: + if '.' not in attr: # i.e.: if we have a '.', we're changing some attribute of a local var. + if pydevd_save_locals.is_save_locals_available(): + if value is SENTINEL_VALUE: + value = eval(expression, frame.f_globals, frame.f_locals) + frame.f_locals[attr] = value + pydevd_save_locals.save_locals(frame) + return frame.f_locals[attr] + + # i.e.: case with '.' or save locals not available (just exec the assignment in the frame). 
+ if value is SENTINEL_VALUE: + value = eval(expression, frame.f_globals, frame.f_locals) + result = value + Exec('%s=%s' % (attr, expression), frame.f_globals, frame.f_locals) + return result + + except Exception: + pydev_log.exception() + + +MAXIMUM_ARRAY_SIZE = 100 +MAX_SLICE_SIZE = 1000 + + +def table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format): + _, type_name, _ = get_type(array) + if type_name == 'ndarray': + array, metaxml, r, c, f = array_to_meta_xml(array, name, format) + xml = metaxml + format = '%' + f + if rows == -1 and cols == -1: + rows = r + cols = c + xml += array_to_xml(array, roffset, coffset, rows, cols, format) + elif type_name == 'DataFrame': + xml = dataframe_to_xml(array, name, roffset, coffset, rows, cols, format) + else: + raise VariableError("Do not know how to convert type %s to table" % (type_name)) + + return "%s" % xml + + +def array_to_xml(array, roffset, coffset, rows, cols, format): + xml = "" + rows = min(rows, MAXIMUM_ARRAY_SIZE) + cols = min(cols, MAXIMUM_ARRAY_SIZE) + + # there is no obvious rule for slicing (at least 5 choices) + if len(array) == 1 and (rows > 1 or cols > 1): + array = array[0] + if array.size > len(array): + array = array[roffset:, coffset:] + rows = min(rows, len(array)) + cols = min(cols, len(array[0])) + if len(array) == 1: + array = array[0] + elif array.size == len(array): + if roffset == 0 and rows == 1: + array = array[coffset:] + cols = min(cols, len(array)) + elif coffset == 0 and cols == 1: + array = array[roffset:] + rows = min(rows, len(array)) + + xml += "" % (rows, cols) + for row in range(rows): + xml += "" % to_string(row) + for col in range(cols): + value = array + if rows == 1 or cols == 1: + if rows == 1 and cols == 1: + value = array[0] + else: + if rows == 1: + dim = col + else: + dim = row + value = array[dim] + if "ndarray" in str(type(value)): + value = value[0] + else: + value = array[row][col] + value = format % value + xml += var_to_xml(value, '') + return xml + + +def array_to_meta_xml(array, name, format): + type = array.dtype.kind + slice = name + l = len(array.shape) + + # initial load, compute slice + if format == '%': + if l > 2: + slice += '[0]' * (l - 2) + for r in range(l - 2): + array = array[0] + if type == 'f': + format = '.5f' + elif type == 'i' or type == 'u': + format = 'd' + else: + format = 's' + else: + format = format.replace('%', '') + + l = len(array.shape) + reslice = "" + if l > 2: + raise Exception("%s has more than 2 dimensions." 
% slice) + elif l == 1: + # special case with 1D arrays arr[i, :] - row, but arr[:, i] - column with equal shape and ndim + # http://stackoverflow.com/questions/16837946/numpy-a-2-rows-1-column-file-loadtxt-returns-1row-2-columns + # explanation: http://stackoverflow.com/questions/15165170/how-do-i-maintain-row-column-orientation-of-vectors-in-numpy?rq=1 + # we use kind of a hack - get information about memory from C_CONTIGUOUS + is_row = array.flags['C_CONTIGUOUS'] + + if is_row: + rows = 1 + cols = min(len(array), MAX_SLICE_SIZE) + if cols < len(array): + reslice = '[0:%s]' % (cols) + array = array[0:cols] + else: + cols = 1 + rows = min(len(array), MAX_SLICE_SIZE) + if rows < len(array): + reslice = '[0:%s]' % (rows) + array = array[0:rows] + elif l == 2: + rows = min(array.shape[-2], MAX_SLICE_SIZE) + cols = min(array.shape[-1], MAX_SLICE_SIZE) + if cols < array.shape[-1] or rows < array.shape[-2]: + reslice = '[0:%s, 0:%s]' % (rows, cols) + array = array[0:rows, 0:cols] + + # avoid slice duplication + if not slice.endswith(reslice): + slice += reslice + + bounds = (0, 0) + if type in "biufc": + bounds = (array.min(), array.max()) + xml = '' % \ + (slice, rows, cols, format, type, bounds[1], bounds[0]) + return array, xml, rows, cols, format + + +def dataframe_to_xml(df, name, roffset, coffset, rows, cols, format): + """ + :type df: pandas.core.frame.DataFrame + :type name: str + :type coffset: int + :type roffset: int + :type rows: int + :type cols: int + :type format: str + + + """ + num_rows = min(df.shape[0], MAX_SLICE_SIZE) + num_cols = min(df.shape[1], MAX_SLICE_SIZE) + if (num_rows, num_cols) != df.shape: + df = df.iloc[0:num_rows, 0: num_cols] + slice = '.iloc[0:%s, 0:%s]' % (num_rows, num_cols) + else: + slice = '' + slice = name + slice + xml = '\n' % \ + (slice, num_rows, num_cols) + + if (rows, cols) == (-1, -1): + rows, cols = num_rows, num_cols + + rows = min(rows, MAXIMUM_ARRAY_SIZE) + cols = min(min(cols, MAXIMUM_ARRAY_SIZE), num_cols) + # need to precompute column bounds here before slicing! 
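A small standalone sketch of the per-column bounds computation referred to above, assuming pandas is installed; only numeric columns (dtype kind in 'biufc') get real min/max bounds:

import pandas as pd

df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z'], 'c': [0.5, 1.5, 2.5]})

col_bounds = []
for col in range(df.shape[1]):
    if df.dtypes.iloc[col].kind in 'biufc':  # bool/int/uint/float/complex columns
        column_values = df.iloc[:, col]
        col_bounds.append((column_values.min(), column_values.max()))
    else:
        col_bounds.append((0, 0))  # non-numeric columns get dummy bounds

print(col_bounds)  # [(1, 3), (0, 0), (0.5, 2.5)]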
+ col_bounds = [None] * cols + for col in range(cols): + dtype = df.dtypes.iloc[coffset + col].kind + if dtype in "biufc": + cvalues = df.iloc[:, coffset + col] + bounds = (cvalues.min(), cvalues.max()) + else: + bounds = (0, 0) + col_bounds[col] = bounds + + df = df.iloc[roffset: roffset + rows, coffset: coffset + cols] + rows, cols = df.shape + + xml += "\n" % (rows, cols) + format = format.replace('%', '') + col_formats = [] + + get_label = lambda label: str(label) if not isinstance(label, tuple) else '/'.join(map(str, label)) + + for col in range(cols): + dtype = df.dtypes.iloc[col].kind + if dtype == 'f' and format: + fmt = format + elif dtype == 'f': + fmt = '.5f' + elif dtype == 'i' or dtype == 'u': + fmt = 'd' + else: + fmt = 's' + col_formats.append('%' + fmt) + bounds = col_bounds[col] + + xml += '\n' % \ + (str(col), get_label(df.axes[1].values[col]), dtype, fmt, bounds[1], bounds[0]) + for row, label in enumerate(iter(df.axes[0])): + xml += "\n" % \ + (str(row), get_label(label)) + xml += "\n" + xml += "\n" % (rows, cols) + for row in range(rows): + xml += "\n" % str(row) + for col in range(cols): + value = df.iat[row, col] + value = col_formats[col] % value + xml += var_to_xml(value, '') + return xml diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vm_type.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vm_type.py new file mode 120000 index 0000000..c010ce1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vm_type.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a2/c3/e1/8220d1714eba2855107df511484b751b12ec4b907e0ee5754e11d1fec6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_xml.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_xml.py new file mode 100644 index 0000000..dcd1cdc --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_xml.py @@ -0,0 +1,399 @@ +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_extension_utils +from _pydevd_bundle import pydevd_resolver +import sys +from _pydevd_bundle.pydevd_constants import BUILTINS_MODULE_NAME, MAXIMUM_VARIABLE_REPRESENTATION_SIZE, \ + RETURN_VALUES_DICT, LOAD_VALUES_ASYNC, DEFAULT_VALUE +from _pydev_bundle.pydev_imports import quote +from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider, StrPresentationProvider +from _pydevd_bundle.pydevd_utils import isinstance_checked, hasattr_checked, DAPGrouper +from _pydevd_bundle.pydevd_resolver import get_var_scope + +try: + import types + + frame_type = types.FrameType +except: + frame_type = None + + +def make_valid_xml_value(s): + # Same thing as xml.sax.saxutils.escape but also escaping double quotes. 
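An equivalent formulation of the escaping described above, using xml.sax.saxutils.escape with an extra entity mapping for double quotes (a sketch, not the vendored implementation):

from xml.sax.saxutils import escape


def escape_for_xml_attribute(value):
    # escape() already handles '&', '<' and '>'; the extra entities mapping
    # also escapes double quotes so the result is safe inside value="..." attributes.
    return escape(value, {'"': '&quot;'})


print(escape_for_xml_attribute('a < b & "c"'))  # a &lt; b &amp; &quot;c&quot;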
+ return s.replace("&", "&").replace('<', '<').replace('>', '>').replace('"', '"') + + +class ExceptionOnEvaluate: + + def __init__(self, result, etype, tb): + self.result = result + self.etype = etype + self.tb = tb + + +_IS_JYTHON = sys.platform.startswith("java") + + +def _create_default_type_map(): + default_type_map = [ + # None means that it should not be treated as a compound variable + + # isintance does not accept a tuple on some versions of python, so, we must declare it expanded + (type(None), None,), + (int, None), + (float, None), + (complex, None), + (str, None), + (tuple, pydevd_resolver.tupleResolver), + (list, pydevd_resolver.tupleResolver), + (dict, pydevd_resolver.dictResolver), + ] + try: + from collections import OrderedDict + default_type_map.insert(0, (OrderedDict, pydevd_resolver.orderedDictResolver)) + # we should put it before dict + except: + pass + + try: + default_type_map.append((long, None)) # @UndefinedVariable + except: + pass # not available on all python versions + + default_type_map.append((DAPGrouper, pydevd_resolver.dapGrouperResolver)) + + try: + default_type_map.append((set, pydevd_resolver.setResolver)) + except: + pass # not available on all python versions + + try: + default_type_map.append((frozenset, pydevd_resolver.setResolver)) + except: + pass # not available on all python versions + + try: + from django.utils.datastructures import MultiValueDict + default_type_map.insert(0, (MultiValueDict, pydevd_resolver.multiValueDictResolver)) + # we should put it before dict + except: + pass # django may not be installed + + try: + from django.forms import BaseForm + default_type_map.insert(0, (BaseForm, pydevd_resolver.djangoFormResolver)) + # we should put it before instance resolver + except: + pass # django may not be installed + + try: + from collections import deque + default_type_map.append((deque, pydevd_resolver.dequeResolver)) + except: + pass + + if frame_type is not None: + default_type_map.append((frame_type, pydevd_resolver.frameResolver)) + + if _IS_JYTHON: + from org.python import core # @UnresolvedImport + default_type_map.append((core.PyNone, None)) + default_type_map.append((core.PyInteger, None)) + default_type_map.append((core.PyLong, None)) + default_type_map.append((core.PyFloat, None)) + default_type_map.append((core.PyComplex, None)) + default_type_map.append((core.PyString, None)) + default_type_map.append((core.PyTuple, pydevd_resolver.tupleResolver)) + default_type_map.append((core.PyList, pydevd_resolver.tupleResolver)) + default_type_map.append((core.PyDictionary, pydevd_resolver.dictResolver)) + default_type_map.append((core.PyStringMap, pydevd_resolver.dictResolver)) + + if hasattr(core, 'PyJavaInstance'): + # Jython 2.5b3 removed it. + default_type_map.append((core.PyJavaInstance, pydevd_resolver.instanceResolver)) + + return default_type_map + + +class TypeResolveHandler(object): + NO_PROVIDER = [] # Sentinel value (any mutable object to be used as a constant would be valid). + + def __init__(self): + # Note: don't initialize with the types we already know about so that the extensions can override + # the default resolvers that are already available if they want. 
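The lookup strategy used by this handler (check a per-class cache first, then scan an ordered type map with isinstance, then cache the result) can be sketched independently of pydevd; the resolver names below are placeholders:

from collections import OrderedDict

# Ordered (type, resolver) pairs, most specific first -- which is why
# OrderedDict must be checked before plain dict.
DEFAULT_TYPE_MAP = [
    (OrderedDict, 'ordered_dict_resolver'),
    (dict, 'dict_resolver'),
    ((list, tuple), 'sequence_resolver'),
]
_resolver_cache = {}


def find_resolver(obj):
    cls = obj.__class__
    try:
        return _resolver_cache[cls]  # Fast path: exact class seen before.
    except KeyError:
        resolver = 'default_resolver'
        for candidate_type, candidate_resolver in DEFAULT_TYPE_MAP:
            if isinstance(obj, candidate_type):
                resolver = candidate_resolver
                break
        _resolver_cache[cls] = resolver
        return resolver


print(find_resolver(OrderedDict()))  # ordered_dict_resolver
print(find_resolver({'k': 1}))       # dict_resolver
print(find_resolver(42))             # default_resolver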
+ self._type_to_resolver_cache = {} + self._type_to_str_provider_cache = {} + self._initialized = False + + def _initialize(self): + self._default_type_map = _create_default_type_map() + self._resolve_providers = pydevd_extension_utils.extensions_of_type(TypeResolveProvider) + self._str_providers = pydevd_extension_utils.extensions_of_type(StrPresentationProvider) + self._initialized = True + + def get_type(self, o): + try: + try: + # Faster than type(o) as we don't need the function call. + type_object = o.__class__ # could fail here + type_name = type_object.__name__ + return self._get_type(o, type_object, type_name) # could fail here + except: + # Not all objects have __class__ (i.e.: there are bad bindings around). + type_object = type(o) + type_name = type_object.__name__ + + try: + return self._get_type(o, type_object, type_name) + except: + if isinstance(type_object, type): + # If it's still something manageable, use the default resolver, otherwise + # fallback to saying that it wasn't possible to get any info on it. + return type_object, str(type_name), pydevd_resolver.defaultResolver + + return 'Unable to get Type', 'Unable to get Type', None + except: + # This happens for org.python.core.InitModule + return 'Unable to get Type', 'Unable to get Type', None + + def _get_type(self, o, type_object, type_name): + # Note: we could have an exception here if the type_object is not hashable... + resolver = self._type_to_resolver_cache.get(type_object) + if resolver is not None: + return type_object, type_name, resolver + + if not self._initialized: + self._initialize() + + try: + for resolver in self._resolve_providers: + if resolver.can_provide(type_object, type_name): + # Cache it + self._type_to_resolver_cache[type_object] = resolver + return type_object, type_name, resolver + + for t in self._default_type_map: + if isinstance_checked(o, t[0]): + # Cache it + resolver = t[1] + self._type_to_resolver_cache[type_object] = resolver + return (type_object, type_name, resolver) + except: + pydev_log.exception() + + # No match return default (and cache it). + resolver = pydevd_resolver.defaultResolver + self._type_to_resolver_cache[type_object] = resolver + return type_object, type_name, resolver + + if _IS_JYTHON: + _base_get_type = _get_type + + def _get_type(self, o, type_object, type_name): + if type_name == 'org.python.core.PyJavaInstance': + return type_object, type_name, pydevd_resolver.instanceResolver + + if type_name == 'org.python.core.PyArray': + return type_object, type_name, pydevd_resolver.jyArrayResolver + + return self._base_get_type(o, type_object, type_name) + + def str_from_providers(self, o, type_object, type_name): + provider = self._type_to_str_provider_cache.get(type_object) + + if provider is self.NO_PROVIDER: + return None + + if provider is not None: + return provider.get_str(o) + + if not self._initialized: + self._initialize() + + for provider in self._str_providers: + if provider.can_provide(type_object, type_name): + self._type_to_str_provider_cache[type_object] = provider + try: + return provider.get_str(o) + except: + pydev_log.exception("Error when getting str with custom provider: %s." % (provider,)) + + self._type_to_str_provider_cache[type_object] = self.NO_PROVIDER + return None + + +_TYPE_RESOLVE_HANDLER = TypeResolveHandler() + +""" +def get_type(o): + Receives object and returns a triple (type_object, type_string, resolver). + + resolver != None means that variable is a container, and should be displayed as a hierarchy. 
+ + Use the resolver to get its attributes. + + All container objects (i.e.: dict, list, tuple, object, etc) should have a resolver. +""" +get_type = _TYPE_RESOLVE_HANDLER.get_type + +_str_from_providers = _TYPE_RESOLVE_HANDLER.str_from_providers + + +def is_builtin(x): + return getattr(x, '__module__', None) == BUILTINS_MODULE_NAME + + +def should_evaluate_full_value(val): + return not LOAD_VALUES_ASYNC or (is_builtin(type(val)) and not isinstance_checked(val, (list, tuple, dict))) + + +def return_values_from_dict_to_xml(return_dict): + res = [] + for name, val in return_dict.items(): + res.append(var_to_xml(val, name, additional_in_xml=' isRetVal="True"')) + return ''.join(res) + + +def frame_vars_to_xml(frame_f_locals, hidden_ns=None): + """ dumps frame variables to XML + + """ + xml = [] + + keys = sorted(frame_f_locals) + + return_values_xml = [] + + for k in keys: + try: + v = frame_f_locals[k] + eval_full_val = should_evaluate_full_value(v) + + if k == '_pydev_stop_at_break': + continue + + if k == RETURN_VALUES_DICT: + for name, val in v.items(): + return_values_xml.append(var_to_xml(val, name, additional_in_xml=' isRetVal="True"')) + + else: + if hidden_ns is not None and k in hidden_ns: + xml.append(var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"', + evaluate_full_value=eval_full_val)) + else: + xml.append(var_to_xml(v, str(k), evaluate_full_value=eval_full_val)) + except Exception: + pydev_log.exception("Unexpected error, recovered safely.") + + # Show return values as the first entry. + return_values_xml.extend(xml) + return ''.join(return_values_xml) + + +def get_variable_details(val, evaluate_full_value=True, to_string=None): + try: + # This should be faster than isinstance (but we have to protect against not having a '__class__' attribute). + is_exception_on_eval = val.__class__ == ExceptionOnEvaluate + except: + is_exception_on_eval = False + + if is_exception_on_eval: + v = val.result + else: + v = val + + _type, type_name, resolver = get_type(v) + type_qualifier = getattr(_type, "__module__", "") + if not evaluate_full_value: + value = DEFAULT_VALUE + else: + try: + str_from_provider = _str_from_providers(v, _type, type_name) + if str_from_provider is not None: + value = str_from_provider + + elif to_string is not None: + value = to_string(v) + + elif hasattr_checked(v, '__class__'): + if v.__class__ == frame_type: + value = pydevd_resolver.frameResolver.get_frame_name(v) + + elif v.__class__ in (list, tuple): + if len(v) > 300: + value = '%s: %s' % (str(v.__class__), '' % (len(v),)) + else: + value = '%s: %s' % (str(v.__class__), v) + else: + try: + cName = str(v.__class__) + if cName.find('.') != -1: + cName = cName.split('.')[-1] + + elif cName.find("'") != -1: # does not have '.' 
(could be something like <type 'int'>) + cName = cName[cName.index("'") + 1:] + + if cName.endswith("'>"): + cName = cName[:-2] + except: + cName = str(v.__class__) + + value = '%s: %s' % (cName, v) + else: + value = str(v) + except: + try: + value = repr(v) + except: + value = 'Unable to get repr for %s' % v.__class__ + + # fix to work with unicode values + try: + if value.__class__ == bytes: + value = value.decode('utf-8', 'replace') + except TypeError: + pass + + return type_name, type_qualifier, is_exception_on_eval, resolver, value + + +def var_to_xml(val, name, trim_if_too_big=True, additional_in_xml='', evaluate_full_value=True): + """ single variable or dictionary to xml representation """ + + type_name, type_qualifier, is_exception_on_eval, resolver, value = get_variable_details( + val, evaluate_full_value) + + scope = get_var_scope(name, val, '', True) + try: + name = quote(name, '/>_= ') # TODO: Fix PY-5834 without using quote + except: + pass + + xml = '<var name="%s" type="%s" ' % (make_valid_xml_value(name), make_valid_xml_value(type_name)) + + if type_qualifier: + xml_qualifier = 'qualifier="%s"' % make_valid_xml_value(type_qualifier) + else: + xml_qualifier = '' + + if value: + # cannot be too big... communication may not handle it. + if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and trim_if_too_big: + value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE] + value += '...' + + xml_value = ' value="%s"' % (make_valid_xml_value(quote(value, '/>_= '))) + else: + xml_value = '' + + if is_exception_on_eval: + xml_container = ' isErrorOnEval="True"' + else: + if resolver is not None: + xml_container = ' isContainer="True"' + else: + xml_container = '' + + if scope: + return ''.join((xml, xml_qualifier, xml_value, xml_container, additional_in_xml, ' scope="', scope, '"', ' />\n')) + else: + return ''.join((xml, xml_qualifier, xml_value, xml_container, additional_in_xml, ' />\n')) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/.gitignore b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/.gitignore new file mode 120000 index 0000000..0af2a3c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/.gitignore @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/10/a8/5b/47e3e962d6ef21884861e0c3754283714fe465fe240730e00118751608 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..925f70e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_cython_wrapper.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_cython_wrapper.cpython-38.pyc new file mode 100644 index 0000000..e5e9080 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_cython_wrapper.cpython-38.pyc differ diff --git
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_main.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_main.cpython-38.pyc new file mode 100644 index 0000000..8cf5b97 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_eval_main.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_tracing.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_tracing.cpython-38.pyc new file mode 100644 index 0000000..d8bd231 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_frame_tracing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_modify_bytecode.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_modify_bytecode.cpython-38.pyc new file mode 100644 index 0000000..71f881d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/__pycache__/pydevd_modify_bytecode.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py new file mode 120000 index 0000000..622d0c8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7e/c8/e0/1a0d3b6a154274831edfcb1abd3c8012042d1336fbbd522576380b85f1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py new file mode 100644 index 0000000..bfebea2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py @@ -0,0 +1,48 @@ +import os + +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_trace_dispatch import USING_CYTHON +from _pydevd_bundle.pydevd_constants import USE_CYTHON_FLAG, ENV_FALSE_LOWER_VALUES, \ + ENV_TRUE_LOWER_VALUES, IS_PY36_OR_GREATER, IS_PY38_OR_GREATER, SUPPORT_GEVENT, IS_PYTHON_STACKLESS, \ + PYDEVD_USE_FRAME_EVAL, PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING + +frame_eval_func = None +stop_frame_eval = None +dummy_trace_dispatch = None +clear_thread_local_info = None + +# "NO" means we should not use frame evaluation, 'YES' we should use it (and fail if not there) and unspecified uses if possible. +if ( + PYDEVD_USE_FRAME_EVAL in ENV_FALSE_LOWER_VALUES or + USE_CYTHON_FLAG in ENV_FALSE_LOWER_VALUES or + not USING_CYTHON or + + # Frame eval mode does not work with ipython compatible debugging (this happens because the + # way that frame eval works is run untraced and set tracing only for the frames with + # breakpoints, but ipython compatible debugging creates separate frames for what's logically + # the same frame). 
+ PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING + ): + USING_FRAME_EVAL = False + +elif SUPPORT_GEVENT or (IS_PYTHON_STACKLESS and not IS_PY38_OR_GREATER): + USING_FRAME_EVAL = False + # i.e gevent and frame eval mode don't get along very well. + # https://github.com/microsoft/debugpy/issues/189 + # Same problem with Stackless. + # https://github.com/stackless-dev/stackless/issues/240 + +elif PYDEVD_USE_FRAME_EVAL in ENV_TRUE_LOWER_VALUES: + # Fail if unable to use + from _pydevd_frame_eval.pydevd_frame_eval_cython_wrapper import frame_eval_func, stop_frame_eval, dummy_trace_dispatch, clear_thread_local_info + USING_FRAME_EVAL = True + +else: + USING_FRAME_EVAL = False + # Try to use if possible + if IS_PY36_OR_GREATER: + try: + from _pydevd_frame_eval.pydevd_frame_eval_cython_wrapper import frame_eval_func, stop_frame_eval, dummy_trace_dispatch, clear_thread_local_info + USING_FRAME_EVAL = True + except ImportError: + pydev_log.show_compile_cython_command_line() diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c new file mode 100644 index 0000000..245e027 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c @@ -0,0 +1,20595 @@ +/* Generated by Cython 0.29.32 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [ + "_pydevd_frame_eval/release_mem.h" + ], + "include_dirs": [ + "_pydevd_frame_eval" + ], + "name": "_pydevd_frame_eval.pydevd_frame_evaluator", + "sources": [ + "_pydevd_frame_eval/pydevd_frame_evaluator.pyx" + ] + }, + "module_name": "_pydevd_frame_eval.pydevd_frame_evaluator" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#include "Python.h" +#if PY_VERSION_HEX >= 0x03090000 +#include "internal/pycore_gc.h" +#include "internal/pycore_interp.h" +#endif + +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
+#else +#define CYTHON_ABI "0_29_32" +#define CYTHON_HEX_VERSION 0x001D20F0 +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) + #endif +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define 
CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif +#elif defined(PY_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #elif !defined(CYTHON_FAST_THREAD_STATE) + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030A0000) + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef 
CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else 
+ #define CYTHON_INLINE + #endif +#endif + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if (!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } +#else + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, 
name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define 
__Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if defined(PyUnicode_IS_READY) + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #else + #define __Pyx_PyUnicode_READY(op) (0) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define 
PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE___pydevd_frame_eval__pydevd_frame_evaluator +#define __PYX_HAVE_API___pydevd_frame_eval__pydevd_frame_evaluator +/* Early includes */ +#include "frameobject.h" +#include "release_mem.h" +#include "code.h" +#include "pystate.h" +#if PY_VERSION_HEX >= 0x03080000 +#include "internal/pycore_pystate.h" +#endif + +#include "ceval.h" + +#if PY_VERSION_HEX >= 0x03090000 +PyObject * noop(PyFrameObject *frame, int exc) { + return NULL; +} +#define CALL_EvalFrameDefault_38(a, b) noop(a, b) +#define CALL_EvalFrameDefault_39(a, b, c) _PyEval_EvalFrameDefault(a, b, c) +#else +PyObject * noop(PyThreadState* tstate, PyFrameObject *frame, int exc) { + return NULL; +} +#define CALL_EvalFrameDefault_39(a, b, c) noop(a, b, c) +#define CALL_EvalFrameDefault_38(a, b) _PyEval_EvalFrameDefault(a, b) +#endif + +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + 
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) 
__Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char 
*__pyx_filename; + + +static const char *__pyx_f[] = { + "_pydevd_frame_eval/pydevd_frame_evaluator.pyx", + "stringsource", + "_pydevd_bundle/pydevd_cython.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo; +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo; +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo; +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo; +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; + +/* "_pydevd_bundle/pydevd_cython.pxd":1 + * cdef class PyDBAdditionalThreadInfo: # <<<<<<<<<<<<<< + * cdef public int pydev_state + * cdef public object pydev_step_stop # Actually, it's a frame or None + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo { + PyObject_HEAD + int pydev_state; + PyObject *pydev_step_stop; + int pydev_original_step_cmd; + int pydev_step_cmd; + int pydev_notify_kill; + PyObject *pydev_smart_step_stop; + int pydev_django_resolve_frame; + PyObject *pydev_call_from_jinja2; + PyObject *pydev_call_inside_jinja2; + int is_tracing; + PyObject *conditional_breakpoint_exception; + PyObject *pydev_message; + int suspend_type; + int pydev_next_line; + PyObject *pydev_func_name; + int suspended_at_unhandled; + PyObject *trace_suspend_type; + PyObject *top_level_thread_tracer_no_back_frames; + PyObject *top_level_thread_tracer_unhandled; + PyObject *thread_tracer; + PyObject *step_in_initial_location; + int pydev_smart_parent_offset; + int pydev_smart_child_offset; + PyObject *pydev_smart_step_into_variants; + PyObject *target_id_to_smart_step_into_variant; + int pydev_use_scoped_step_frame; +}; + + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":24 + * + * + * cdef class ThreadInfo: # <<<<<<<<<<<<<< + * + * cdef public PyDBAdditionalThreadInfo additional_info + */ +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo { + PyObject_HEAD + struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_vtab; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *additional_info; + int is_pydevd_thread; + int inside_frame_eval; + int fully_initialized; + PyObject *thread_trace_func; + int _can_create_dummy_thread; + int force_stay_in_untraced_mode; +}; + + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":125 + * + * + * cdef class FuncCodeInfo: # <<<<<<<<<<<<<< + * + * cdef public str co_filename + */ +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo { + PyObject_HEAD + PyObject *co_filename; + PyObject *co_name; + PyObject *canonical_normalized_filename; + int always_skip_code; + int breakpoint_found; + PyObject *new_code; + int breakpoints_mtime; +}; + + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":316 + * + * + * cdef class _CodeLineInfo: # <<<<<<<<<<<<<< + * + * cdef public dict line_to_offset + */ +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo { + PyObject_HEAD + PyObject *line_to_offset; + int first_line; + int last_line; +}; + + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":361 + * + * + * cdef class _CacheValue(object): # <<<<<<<<<<<<<< + * + * cdef public object code_obj_py + */ +struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue { + PyObject_HEAD + struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_vtab; + PyObject *code_obj_py; + struct 
__pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *code_line_info; + PyObject *breakpoints_hit_at_lines; + PyObject *code_lines_as_set; +}; + + + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":24 + * + * + * cdef class ThreadInfo: # <<<<<<<<<<<<<< + * + * cdef public PyDBAdditionalThreadInfo additional_info + */ + +struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo { + PyObject *(*initialize)(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *, PyFrameObject *); + PyObject *(*initialize_if_possible)(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *); +}; +static struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo; + + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":361 + * + * + * cdef class _CacheValue(object): # <<<<<<<<<<<<<< + * + * cdef public object code_obj_py + */ + +struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue { + PyObject *(*compute_force_stay_in_untraced_mode)(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *, PyObject *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + 
#define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if CYTHON_FAST_PYCALL + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif // CYTHON_FAST_PYCALL +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectCall2Args.proto */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* PyIntBinop.proto */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check); +#else +#define 
__Pyx_PyInt_AddObjC(op1, op2, intval, inplace, zerodivision_check)\ + (inplace ? PyNumber_InPlaceAdd(op1, op2) : PyNumber_Add(op1, op2)) +#endif + +/* SliceObject.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( + PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** py_start, PyObject** py_stop, PyObject** py_slice, + int has_cstart, int has_cstop, int wraparound); + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* StrEquals.proto */ +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals +#else +#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK 
+static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* PyObjectLookupSpecial.proto */ +#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) { + PyObject *res; + PyTypeObject *tp = Py_TYPE(obj); +#if PY_MAJOR_VERSION < 3 + if (unlikely(PyInstance_Check(obj))) + return __Pyx_PyObject_GetAttrStr(obj, attr_name); +#endif + res = _PyType_Lookup(tp, attr_name); + if (likely(res)) { + descrgetfunc f = Py_TYPE(res)->tp_descr_get; + if (!f) { + Py_INCREF(res); + } else { + res = f(res, obj, (PyObject *)tp); + } + } else { + PyErr_SetObject(PyExc_AttributeError, attr_name); + } + return res; +} +#else +#define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) +#endif + +/* PyObjectSetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL) +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value); +#else +#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) +#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) +#endif + +/* None.proto */ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname); + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, 
name, exact)\ + ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod0.proto */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* UnpackTupleError.proto */ +static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); + +/* UnpackTuple2.proto */ +#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple)\ + (likely(is_tuple || PyTuple_Check(tuple)) ?\ + (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ?\ + __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) :\ + (__Pyx_UnpackTupleError(tuple, 2), -1)) :\ + __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple)) +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple); +static int __Pyx_unpack_tuple2_generic( + PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple); + +/* dict_iter.proto */ +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_is_dict); +static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); + +/* PyDictContains.proto */ +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* WriteUnraisableException.proto */ +static void __Pyx_WriteUnraisable(const char *name, int clineno, + int lineno, const char *filename, + int full_traceback, int nogil); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* type_obj); + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum 
__Pyx_ImportType_CheckSize check_size); +#endif + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_initialize(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyFrameObject *__pyx_v_frame_obj); /* proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_initialize_if_possible(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_compute_force_stay_in_untraced_mode(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_breakpoints, int __pyx_skip_dispatch); /* proto*/ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from '_pydevd_bundle.pydevd_cython' */ +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = 0; + +/* Module declarations from '_pydevd_frame_eval.pydevd_frame_evaluator' */ +static PyTypeObject 
*__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo = 0; +static PyTypeObject *__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo = 0; +static PyTypeObject *__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo = 0; +static PyTypeObject *__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue = 0; +static int __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator__code_extra_index; +static int __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator_IS_PY_39_OWNARDS; +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_thread_info(PyFrameObject *); /*proto*/ +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_func_code_info(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *, PyFrameObject *, PyCodeObject *); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_generate_code_with_breakpoints(PyObject *, PyObject *); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval_38(PyFrameObject *, int); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval_39(PyThreadState *, PyFrameObject *, int); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_ThreadInfo__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *, PyObject *); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_FuncCodeInfo__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *, PyObject *); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CodeLineInfo__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *, PyObject *); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CacheValue__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *, PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "_pydevd_frame_eval.pydevd_frame_evaluator" +extern int __pyx_module_is_main__pydevd_frame_eval__pydevd_frame_evaluator; +int __pyx_module_is_main__pydevd_frame_eval__pydevd_frame_evaluator = 0; + +/* Implementation of '_pydevd_frame_eval.pydevd_frame_evaluator' */ +static PyObject *__pyx_builtin_AttributeError; +static PyObject *__pyx_builtin_min; +static PyObject *__pyx_builtin_max; +static const char __pyx_k_[] = "/"; +static const char __pyx_k__2[] = "\\"; +static const char __pyx_k__3[] = "."; +static const char __pyx_k__5[] = ""; +static const char __pyx_k_arg[] = "arg"; +static const char __pyx_k_dis[] = "dis"; +static const char __pyx_k_get[] = "get"; +static const char __pyx_k_max[] = "max"; +static const char __pyx_k_min[] = "min"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_obj[] = "obj"; +static const char __pyx_k_run[] = "run"; +static const char __pyx_k_sys[] = "sys"; +static const char __pyx_k_call[] = "__call__"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_exec[] = "_exec"; +static const char __pyx_k_exit[] = "__exit__"; +static const char __pyx_k_line[] = "line"; +static const char __pyx_k_main[] = "main"; +static const char __pyx_k_name[] = "__name__"; 
+static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_cache[] = "_cache"; +static const char __pyx_k_enter[] = "__enter__"; +static const char __pyx_k_event[] = "event"; +static const char __pyx_k_frame[] = "frame"; +static const char __pyx_k_local[] = "local"; +static const char __pyx_k_mtime[] = "mtime"; +static const char __pyx_k_rfind[] = "rfind"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_active[] = "_active"; +static const char __pyx_k_call_2[] = "call"; +static const char __pyx_k_f_back[] = "f_back"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_main_2[] = "__main__"; +static const char __pyx_k_offset[] = "offset"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_plugin[] = "plugin"; +static const char __pyx_k_pydevd[] = "pydevd"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_thread[] = "thread"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_f_trace[] = "f_trace"; +static const char __pyx_k_SetTrace[] = "SetTrace"; +static const char __pyx_k_can_skip[] = "can_skip"; +static const char __pyx_k_code_obj[] = "code_obj"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_bootstrap[] = "__bootstrap"; +static const char __pyx_k_decref_py[] = "decref_py"; +static const char __pyx_k_get_ident[] = "_get_ident"; +static const char __pyx_k_last_line[] = "last_line"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_threading[] = "threading"; +static const char __pyx_k_CacheValue[] = "_CacheValue"; +static const char __pyx_k_ThreadInfo[] = "ThreadInfo"; +static const char __pyx_k_first_line[] = "first_line"; +static const char __pyx_k_global_dbg[] = "global_dbg"; +static const char __pyx_k_issuperset[] = "issuperset"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_DebugHelper[] = "DebugHelper"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_bootstrap_2[] = "_bootstrap"; +static const char __pyx_k_breakpoints[] = "breakpoints"; +static const char __pyx_k_code_obj_py[] = "code_obj_py"; +static const char __pyx_k_get_ident_2[] = "get_ident"; +static const char __pyx_k_thread_info[] = "thread_info"; +static const char __pyx_k_CodeLineInfo[] = "_CodeLineInfo"; +static const char __pyx_k_FuncCodeInfo[] = "FuncCodeInfo"; +static const char __pyx_k_intersection[] = "intersection"; +static const char __pyx_k_pydev_monkey[] = "pydev_monkey"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = "stringsource"; +static const char __pyx_k_version_info[] = "version_info"; +static const char __pyx_k_get_file_type[] = "get_file_type"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_thread_active[] = "_thread_active"; +static const char __pyx_k_AttributeError[] = "AttributeError"; +static const char __pyx_k_code_line_info[] = "code_line_info"; +static const char __pyx_k_current_thread[] = "current_thread"; +static const char __pyx_k_findlinestarts[] = "findlinestarts"; +static const char __pyx_k_line_to_offset[] = "line_to_offset"; +static const char __pyx_k_pydevd_tracing[] = 
"pydevd_tracing"; +static const char __pyx_k_set_trace_func[] = "set_trace_func"; +static const char __pyx_k_trace_dispatch[] = "trace_dispatch"; +static const char __pyx_k_additional_info[] = "additional_info"; +static const char __pyx_k_bootstrap_inner[] = "__bootstrap_inner"; +static const char __pyx_k_frame_eval_func[] = "frame_eval_func"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_stop_frame_eval[] = "stop_frame_eval"; +static const char __pyx_k_bootstrap_inner_2[] = "_bootstrap_inner"; +static const char __pyx_k_pydevd_file_utils[] = "pydevd_file_utils"; +static const char __pyx_k_signature_factory[] = "signature_factory"; +static const char __pyx_k_thread_local_info[] = "_thread_local_info"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_get_code_line_info[] = "_get_code_line_info"; +static const char __pyx_k_get_thread_info_py[] = "get_thread_info_py"; +static const char __pyx_k_show_return_values[] = "show_return_values"; +static const char __pyx_k_get_cache_file_type[] = "get_cache_file_type"; +static const char __pyx_k_update_globals_dict[] = "update_globals_dict"; +static const char __pyx_k_GlobalDebuggerHolder[] = "GlobalDebuggerHolder"; +static const char __pyx_k_dummy_trace_dispatch[] = "dummy_trace_dispatch"; +static const char __pyx_k_dummy_tracing_holder[] = "dummy_tracing_holder"; +static const char __pyx_k_insert_pydevd_breaks[] = "insert_pydevd_breaks"; +static const char __pyx_k_get_func_code_info_py[] = "get_func_code_info_py"; +static const char __pyx_k_has_plugin_line_breaks[] = "has_plugin_line_breaks"; +static const char __pyx_k_is_pydev_daemon_thread[] = "is_pydev_daemon_thread"; +static const char __pyx_k_clear_thread_local_info[] = "clear_thread_local_info"; +static const char __pyx_k_pyx_unpickle_ThreadInfo[] = "__pyx_unpickle_ThreadInfo"; +static const char __pyx_k_breakpoints_hit_at_lines[] = "breakpoints_hit_at_lines"; +static const char __pyx_k_pyx_unpickle__CacheValue[] = "__pyx_unpickle__CacheValue"; +static const char __pyx_k_pyx_unpickle_FuncCodeInfo[] = "__pyx_unpickle_FuncCodeInfo"; +static const char __pyx_k_break_on_caught_exceptions[] = "break_on_caught_exceptions"; +static const char __pyx_k_pyx_unpickle__CodeLineInfo[] = "__pyx_unpickle__CodeLineInfo"; +static const char __pyx_k_get_cached_code_obj_info_py[] = "get_cached_code_obj_info_py"; +static const char __pyx_k_has_plugin_exception_breaks[] = "has_plugin_exception_breaks"; +static const char __pyx_k_NORM_PATHS_AND_BASE_CONTAINER[] = "NORM_PATHS_AND_BASE_CONTAINER"; +static const char __pyx_k_pydevd_bundle_pydevd_constants[] = "_pydevd_bundle.pydevd_constants"; +static const char __pyx_k_pydevd_frame_eval_pydevd_frame[] = "_pydevd_frame_eval.pydevd_frame_tracing"; +static const char __pyx_k_If_a_code_object_is_cached_that[] = "If a code object is cached, that same code object must be reused."; +static const char __pyx_k_get_abs_path_real_path_and_base[] = "get_abs_path_real_path_and_base_from_frame"; +static const char __pyx_k_pydev_bundle__pydev_saved_modul[] = "_pydev_bundle._pydev_saved_modules"; +static const char __pyx_k_pydevd_bundle_pydevd_additional[] = "_pydevd_bundle.pydevd_additional_thread_info"; +static const char __pyx_k_pydevd_bundle_pydevd_trace_disp[] = "_pydevd_bundle.pydevd_trace_dispatch"; +static const char __pyx_k_pydevd_frame_eval_pydevd_modify[] = "_pydevd_frame_eval.pydevd_modify_bytecode"; 
+static const char __pyx_k_set_additional_thread_info_lock[] = "_set_additional_thread_info_lock"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))"; +static const char __pyx_k_break_on_user_uncaught_exception[] = "break_on_user_uncaught_exceptions"; +static const char __pyx_k_compute_force_stay_in_untraced_m[] = "compute_force_stay_in_untraced_mode"; +static const char __pyx_k_fix_top_level_trace_and_get_trac[] = "fix_top_level_trace_and_get_trace_func"; +static const char __pyx_k_function_breakpoint_name_to_brea[] = "function_breakpoint_name_to_breakpoint"; +static const char __pyx_k_generate_code_with_breakpoints_p[] = "generate_code_with_breakpoints_py"; +static const char __pyx_k_pydevd_frame_eval_pydevd_frame_2[] = "_pydevd_frame_eval/pydevd_frame_evaluator.pyx"; +static const char __pyx_k_pydevd_frame_eval_pydevd_frame_3[] = "_pydevd_frame_eval.pydevd_frame_evaluator"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0_2[] = "Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0_3[] = "Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0_4[] = "Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))"; +static PyObject *__pyx_kp_s_; +static PyObject *__pyx_n_s_AttributeError; +static PyObject *__pyx_n_s_CacheValue; +static PyObject *__pyx_n_s_CodeLineInfo; +static PyObject *__pyx_n_s_DebugHelper; +static PyObject *__pyx_n_s_FuncCodeInfo; +static PyObject *__pyx_n_s_GlobalDebuggerHolder; +static PyObject *__pyx_kp_s_If_a_code_object_is_cached_that; +static PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; +static PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2; +static PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_3; +static PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_4; +static PyObject *__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER; +static PyObject *__pyx_n_s_PickleError; +static PyObject *__pyx_n_s_SetTrace; +static PyObject *__pyx_n_s_ThreadInfo; +static PyObject *__pyx_kp_s__2; +static PyObject *__pyx_kp_s__3; +static PyObject *__pyx_kp_s__5; +static PyObject *__pyx_n_s_active; +static PyObject *__pyx_n_s_additional_info; +static PyObject *__pyx_n_s_arg; +static PyObject *__pyx_n_s_bootstrap; +static PyObject *__pyx_n_s_bootstrap_2; +static PyObject *__pyx_n_s_bootstrap_inner; +static PyObject *__pyx_n_s_bootstrap_inner_2; +static PyObject *__pyx_n_s_break_on_caught_exceptions; +static PyObject *__pyx_n_s_break_on_user_uncaught_exception; +static PyObject *__pyx_n_s_breakpoints; +static PyObject *__pyx_n_s_breakpoints_hit_at_lines; +static PyObject *__pyx_n_s_cache; +static PyObject *__pyx_n_s_call; +static PyObject *__pyx_n_s_call_2; +static PyObject *__pyx_n_s_can_skip; +static PyObject *__pyx_n_s_clear_thread_local_info; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_code_line_info; +static PyObject *__pyx_n_s_code_obj; +static PyObject *__pyx_n_s_code_obj_py; +static PyObject 
*__pyx_n_s_compute_force_stay_in_untraced_m; +static PyObject *__pyx_n_s_current_thread; +static PyObject *__pyx_n_s_decref_py; +static PyObject *__pyx_n_s_dict; +static PyObject *__pyx_n_s_dis; +static PyObject *__pyx_n_s_dummy_trace_dispatch; +static PyObject *__pyx_n_s_dummy_tracing_holder; +static PyObject *__pyx_n_s_enter; +static PyObject *__pyx_n_s_event; +static PyObject *__pyx_n_s_exec; +static PyObject *__pyx_n_s_exit; +static PyObject *__pyx_n_s_f_back; +static PyObject *__pyx_n_s_f_trace; +static PyObject *__pyx_n_s_findlinestarts; +static PyObject *__pyx_n_s_first_line; +static PyObject *__pyx_n_s_fix_top_level_trace_and_get_trac; +static PyObject *__pyx_n_s_frame; +static PyObject *__pyx_n_s_frame_eval_func; +static PyObject *__pyx_n_s_function_breakpoint_name_to_brea; +static PyObject *__pyx_n_s_generate_code_with_breakpoints_p; +static PyObject *__pyx_n_s_get; +static PyObject *__pyx_n_s_get_abs_path_real_path_and_base; +static PyObject *__pyx_n_s_get_cache_file_type; +static PyObject *__pyx_n_s_get_cached_code_obj_info_py; +static PyObject *__pyx_n_s_get_code_line_info; +static PyObject *__pyx_n_s_get_file_type; +static PyObject *__pyx_n_s_get_func_code_info_py; +static PyObject *__pyx_n_s_get_ident; +static PyObject *__pyx_n_s_get_ident_2; +static PyObject *__pyx_n_s_get_thread_info_py; +static PyObject *__pyx_n_s_getstate; +static PyObject *__pyx_n_s_global_dbg; +static PyObject *__pyx_n_s_has_plugin_exception_breaks; +static PyObject *__pyx_n_s_has_plugin_line_breaks; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_insert_pydevd_breaks; +static PyObject *__pyx_n_s_intersection; +static PyObject *__pyx_n_s_is_pydev_daemon_thread; +static PyObject *__pyx_n_s_issuperset; +static PyObject *__pyx_n_s_last_line; +static PyObject *__pyx_n_s_line; +static PyObject *__pyx_n_s_line_to_offset; +static PyObject *__pyx_n_s_local; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_main_2; +static PyObject *__pyx_n_s_max; +static PyObject *__pyx_n_s_min; +static PyObject *__pyx_n_s_mtime; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_new; +static PyObject *__pyx_n_s_obj; +static PyObject *__pyx_n_s_offset; +static PyObject *__pyx_n_s_pickle; +static PyObject *__pyx_n_s_plugin; +static PyObject *__pyx_n_s_pydev_bundle__pydev_saved_modul; +static PyObject *__pyx_n_s_pydev_monkey; +static PyObject *__pyx_n_s_pydevd; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_additional; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_constants; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_trace_disp; +static PyObject *__pyx_n_s_pydevd_file_utils; +static PyObject *__pyx_n_s_pydevd_frame_eval_pydevd_frame; +static PyObject *__pyx_kp_s_pydevd_frame_eval_pydevd_frame_2; +static PyObject *__pyx_n_s_pydevd_frame_eval_pydevd_frame_3; +static PyObject *__pyx_n_s_pydevd_frame_eval_pydevd_modify; +static PyObject *__pyx_n_s_pydevd_tracing; +static PyObject *__pyx_n_s_pyx_PickleError; +static PyObject *__pyx_n_s_pyx_checksum; +static PyObject *__pyx_n_s_pyx_result; +static PyObject *__pyx_n_s_pyx_state; +static PyObject *__pyx_n_s_pyx_type; +static PyObject *__pyx_n_s_pyx_unpickle_FuncCodeInfo; +static PyObject *__pyx_n_s_pyx_unpickle_ThreadInfo; +static PyObject *__pyx_n_s_pyx_unpickle__CacheValue; +static PyObject *__pyx_n_s_pyx_unpickle__CodeLineInfo; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_reduce; +static PyObject *__pyx_n_s_reduce_cython; +static PyObject *__pyx_n_s_reduce_ex; +static PyObject *__pyx_n_s_rfind; +static PyObject 
*__pyx_n_s_run; +static PyObject *__pyx_n_s_set_additional_thread_info_lock; +static PyObject *__pyx_n_s_set_trace_func; +static PyObject *__pyx_n_s_setstate; +static PyObject *__pyx_n_s_setstate_cython; +static PyObject *__pyx_n_s_show_return_values; +static PyObject *__pyx_n_s_signature_factory; +static PyObject *__pyx_n_s_state; +static PyObject *__pyx_n_s_stop_frame_eval; +static PyObject *__pyx_kp_s_stringsource; +static PyObject *__pyx_n_s_sys; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_thread; +static PyObject *__pyx_n_s_thread_active; +static PyObject *__pyx_n_s_thread_info; +static PyObject *__pyx_n_s_thread_local_info; +static PyObject *__pyx_n_s_threading; +static PyObject *__pyx_n_s_trace_dispatch; +static PyObject *__pyx_n_s_update; +static PyObject *__pyx_n_s_update_globals_dict; +static PyObject *__pyx_n_s_version_info; +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_clear_thread_local_info(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo___reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_2__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo___init__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static int 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_2__reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_4__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_2dummy_trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_4get_thread_info_py(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_6decref_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj); /* proto */ +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_8get_func_code_info_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_thread_info, PyObject *__pyx_v_frame, PyObject *__pyx_v_code_obj); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo___init__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_line_to_offset, int __pyx_v_first_line, int __pyx_v_last_line); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_4__del__(struct 
__pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_2__reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_4__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10_get_code_line_info(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_code_obj); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12get_cached_code_obj_info_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_code_obj_py); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_code_obj_py, struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_code_line_info, PyObject *__pyx_v_breakpoints_hit_at_lines); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_2compute_force_stay_in_untraced_mode(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_breakpoints); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_4__reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_6__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_14generate_code_with_breakpoints_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_code_obj_py, PyObject *__pyx_v_breakpoints); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_16frame_eval_func(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_18stop_frame_eval(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_20__pyx_unpickle_ThreadInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_22__pyx_unpickle_FuncCodeInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_24__pyx_unpickle__CodeLineInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_26__pyx_unpickle__CacheValue(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject 
*__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_int_0; +static PyObject *__pyx_int_1; +static PyObject *__pyx_int_2; +static PyObject *__pyx_int_3; +static PyObject *__pyx_int_9; +static PyObject *__pyx_int_2520179; +static PyObject *__pyx_int_11485321; +static PyObject *__pyx_int_64258489; +static PyObject *__pyx_int_66829570; +static PyObject *__pyx_int_72405718; +static PyObject *__pyx_int_95010005; +static PyObject *__pyx_int_156687530; +static PyObject *__pyx_int_180628038; +static PyObject *__pyx_int_188670045; +static PyObject *__pyx_int_193022138; +static PyObject *__pyx_int_240343912; +static PyObject *__pyx_int_249558979; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__9; +static PyObject *__pyx_slice__24; +static PyObject *__pyx_tuple__11; +static PyObject *__pyx_tuple__14; +static PyObject *__pyx_tuple__16; +static PyObject *__pyx_tuple__18; +static PyObject *__pyx_tuple__20; +static PyObject *__pyx_tuple__22; +static PyObject *__pyx_tuple__25; +static PyObject *__pyx_tuple__26; +static PyObject *__pyx_tuple__28; +static PyObject *__pyx_tuple__30; +static PyObject *__pyx_tuple__32; +static PyObject *__pyx_tuple__34; +static PyObject *__pyx_tuple__36; +static PyObject *__pyx_codeobj__10; +static PyObject *__pyx_codeobj__12; +static PyObject *__pyx_codeobj__13; +static PyObject *__pyx_codeobj__15; +static PyObject *__pyx_codeobj__17; +static PyObject *__pyx_codeobj__19; +static PyObject *__pyx_codeobj__21; +static PyObject *__pyx_codeobj__23; +static PyObject *__pyx_codeobj__27; +static PyObject *__pyx_codeobj__29; +static PyObject *__pyx_codeobj__31; +static PyObject *__pyx_codeobj__33; +static PyObject *__pyx_codeobj__35; +static PyObject *__pyx_codeobj__37; +/* Late includes */ + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":19 + * _thread_active = threading._active + * + * def clear_thread_local_info(): # <<<<<<<<<<<<<< + * global _thread_local_info + * _thread_local_info = threading.local() + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_1clear_thread_local_info(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_1clear_thread_local_info = {"clear_thread_local_info", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_1clear_thread_local_info, METH_NOARGS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_1clear_thread_local_info(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear_thread_local_info (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_clear_thread_local_info(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_clear_thread_local_info(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("clear_thread_local_info", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":21 + * def clear_thread_local_info(): + * global _thread_local_info + * _thread_local_info = threading.local() # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_threading); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_local); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_2) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_thread_local_info, __pyx_t_1) < 0) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":19 + * _thread_active = threading._active + * + * def clear_thread_local_info(): # <<<<<<<<<<<<<< + * global _thread_local_info + * _thread_local_info = threading.local() + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.clear_thread_local_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":39 + * cdef public bint force_stay_in_untraced_mode + * + * cdef initialize(self, PyFrameObject * frame_obj): # <<<<<<<<<<<<<< + * # Places that create a ThreadInfo should verify that + * # a current Python frame is being executed! + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_initialize(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyFrameObject *__pyx_v_frame_obj) { + PyObject *__pyx_v_basename = NULL; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_j = NULL; + PyObject *__pyx_v_co_name = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyFrameObject *__pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("initialize", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":42 + * # Places that create a ThreadInfo should verify that + * # a current Python frame is being executed! 
+ * assert frame_obj != NULL # <<<<<<<<<<<<<< + * + * self.additional_info = None + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(!Py_OptimizeFlag)) { + if (unlikely(!((__pyx_v_frame_obj != NULL) != 0))) { + PyErr_SetNone(PyExc_AssertionError); + __PYX_ERR(0, 42, __pyx_L1_error) + } + } + #endif + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":44 + * assert frame_obj != NULL + * + * self.additional_info = None # <<<<<<<<<<<<<< + * self.is_pydevd_thread = False + * self.inside_frame_eval = 0 + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->additional_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->additional_info)); + __pyx_v_self->additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)Py_None); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":45 + * + * self.additional_info = None + * self.is_pydevd_thread = False # <<<<<<<<<<<<<< + * self.inside_frame_eval = 0 + * self.fully_initialized = False + */ + __pyx_v_self->is_pydevd_thread = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":46 + * self.additional_info = None + * self.is_pydevd_thread = False + * self.inside_frame_eval = 0 # <<<<<<<<<<<<<< + * self.fully_initialized = False + * self.thread_trace_func = None + */ + __pyx_v_self->inside_frame_eval = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":47 + * self.is_pydevd_thread = False + * self.inside_frame_eval = 0 + * self.fully_initialized = False # <<<<<<<<<<<<<< + * self.thread_trace_func = None + * + */ + __pyx_v_self->fully_initialized = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":48 + * self.inside_frame_eval = 0 + * self.fully_initialized = False + * self.thread_trace_func = None # <<<<<<<<<<<<<< + * + * # Get the root (if it's not a Thread initialized from the threading + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->thread_trace_func); + __Pyx_DECREF(__pyx_v_self->thread_trace_func); + __pyx_v_self->thread_trace_func = Py_None; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":54 + * # otherwise, we have to wait for the threading module itself to + * # create the Thread entry). + * while frame_obj.f_back != NULL: # <<<<<<<<<<<<<< + * frame_obj = frame_obj.f_back + * + */ + while (1) { + __pyx_t_1 = ((__pyx_v_frame_obj->f_back != NULL) != 0); + if (!__pyx_t_1) break; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":55 + * # create the Thread entry). 
+ * while frame_obj.f_back != NULL: + * frame_obj = frame_obj.f_back # <<<<<<<<<<<<<< + * + * basename = frame_obj.f_code.co_filename + */ + __pyx_t_2 = __pyx_v_frame_obj->f_back; + __pyx_v_frame_obj = __pyx_t_2; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":57 + * frame_obj = frame_obj.f_back + * + * basename = frame_obj.f_code.co_filename # <<<<<<<<<<<<<< + * i = basename.rfind('/') + * j = basename.rfind('\\') + */ + __pyx_t_3 = ((PyObject *)__pyx_v_frame_obj->f_code->co_filename); + __Pyx_INCREF(__pyx_t_3); + __pyx_v_basename = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":58 + * + * basename = frame_obj.f_code.co_filename + * i = basename.rfind('/') # <<<<<<<<<<<<<< + * j = basename.rfind('\\') + * if j > i: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_basename, __pyx_n_s_rfind); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 58, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_s_) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_s_); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 58, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_i = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":59 + * basename = frame_obj.f_code.co_filename + * i = basename.rfind('/') + * j = basename.rfind('\\') # <<<<<<<<<<<<<< + * if j > i: + * i = j + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_basename, __pyx_n_s_rfind); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_s__2) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_s__2); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_j = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":60 + * i = basename.rfind('/') + * j = basename.rfind('\\') + * if j > i: # <<<<<<<<<<<<<< + * i = j + * if i >= 0: + */ + __pyx_t_3 = PyObject_RichCompare(__pyx_v_j, __pyx_v_i, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 60, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 60, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":61 + * j = basename.rfind('\\') + * if j > i: + * i = j # <<<<<<<<<<<<<< + * if i >= 0: + * basename = basename[i + 1:] + */ + __Pyx_INCREF(__pyx_v_j); + __Pyx_DECREF_SET(__pyx_v_i, __pyx_v_j); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":60 + * i = basename.rfind('/') + * j = basename.rfind('\\') + * if j > i: # <<<<<<<<<<<<<< + * i = j + * if i >= 0: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":62 + * if j > i: + * i = j + * if i >= 0: # <<<<<<<<<<<<<< + * basename = basename[i + 1:] + * # remove ext + */ + __pyx_t_3 = PyObject_RichCompare(__pyx_v_i, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 62, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 62, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":63 + * i = j + * if i >= 0: + * basename = basename[i + 1:] # <<<<<<<<<<<<<< + * # remove ext + * i = basename.rfind('.') + */ + __pyx_t_3 = __Pyx_PyInt_AddObjC(__pyx_v_i, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_basename, 0, 0, &__pyx_t_3, NULL, NULL, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_basename, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":62 + * if j > i: + * i = j + * if i >= 0: # <<<<<<<<<<<<<< + * basename = basename[i + 1:] + * # remove ext + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":65 + * basename = basename[i + 1:] + * # remove ext + * i = basename.rfind('.') # <<<<<<<<<<<<<< + * if i >= 0: + * basename = basename[:i] + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_basename, __pyx_n_s_rfind); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_5, __pyx_kp_s__3) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_s__3); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":66 + * # remove ext + * i = basename.rfind('.') + * if i >= 0: # <<<<<<<<<<<<<< + * basename = basename[:i] + * + */ + __pyx_t_4 = PyObject_RichCompare(__pyx_v_i, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 66, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":67 + * i = basename.rfind('.') + * if i >= 0: + * basename = basename[:i] # <<<<<<<<<<<<<< + * + * co_name = frame_obj.f_code.co_name + */ + __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_basename, 0, 0, NULL, &__pyx_v_i, NULL, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_basename, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":66 + * # remove ext + * i = basename.rfind('.') + * if i >= 0: # <<<<<<<<<<<<<< + * basename = basename[:i] + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":69 + * basename = basename[:i] + * + * co_name = frame_obj.f_code.co_name # <<<<<<<<<<<<<< + * + * # In these cases we cannot create a dummy thread (an actual + */ + __pyx_t_4 = ((PyObject *)__pyx_v_frame_obj->f_code->co_name); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_co_name = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":73 + * # In these cases we cannot create a dummy thread (an actual + * # thread will be created later or tracing will already be set). 
+ * if basename == 'threading' and co_name in ('__bootstrap', '_bootstrap', '__bootstrap_inner', '_bootstrap_inner'): # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * elif basename == 'pydev_monkey' and co_name == '__call__': + */ + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_v_basename, __pyx_n_s_threading, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 73, __pyx_L1_error) + if (__pyx_t_6) { + } else { + __pyx_t_1 = __pyx_t_6; + goto __pyx_L9_bool_binop_done; + } + __Pyx_INCREF(__pyx_v_co_name); + __pyx_t_4 = __pyx_v_co_name; + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_bootstrap, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 73, __pyx_L1_error) + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L11_bool_binop_done; + } + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_bootstrap_2, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 73, __pyx_L1_error) + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L11_bool_binop_done; + } + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_bootstrap_inner, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 73, __pyx_L1_error) + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L11_bool_binop_done; + } + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_bootstrap_inner_2, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 73, __pyx_L1_error) + __pyx_t_6 = __pyx_t_7; + __pyx_L11_bool_binop_done:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = (__pyx_t_6 != 0); + __pyx_t_1 = __pyx_t_7; + __pyx_L9_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":74 + * # thread will be created later or tracing will already be set). + * if basename == 'threading' and co_name in ('__bootstrap', '_bootstrap', '__bootstrap_inner', '_bootstrap_inner'): + * self._can_create_dummy_thread = False # <<<<<<<<<<<<<< + * elif basename == 'pydev_monkey' and co_name == '__call__': + * self._can_create_dummy_thread = False + */ + __pyx_v_self->_can_create_dummy_thread = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":73 + * # In these cases we cannot create a dummy thread (an actual + * # thread will be created later or tracing will already be set). 
+ * if basename == 'threading' and co_name in ('__bootstrap', '_bootstrap', '__bootstrap_inner', '_bootstrap_inner'): # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * elif basename == 'pydev_monkey' and co_name == '__call__': + */ + goto __pyx_L8; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":75 + * if basename == 'threading' and co_name in ('__bootstrap', '_bootstrap', '__bootstrap_inner', '_bootstrap_inner'): + * self._can_create_dummy_thread = False + * elif basename == 'pydev_monkey' and co_name == '__call__': # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): + */ + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_basename, __pyx_n_s_pydev_monkey, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 75, __pyx_L1_error) + if (__pyx_t_7) { + } else { + __pyx_t_1 = __pyx_t_7; + goto __pyx_L15_bool_binop_done; + } + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_co_name, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 75, __pyx_L1_error) + __pyx_t_1 = __pyx_t_7; + __pyx_L15_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":76 + * self._can_create_dummy_thread = False + * elif basename == 'pydev_monkey' and co_name == '__call__': + * self._can_create_dummy_thread = False # <<<<<<<<<<<<<< + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): + * self._can_create_dummy_thread = False + */ + __pyx_v_self->_can_create_dummy_thread = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":75 + * if basename == 'threading' and co_name in ('__bootstrap', '_bootstrap', '__bootstrap_inner', '_bootstrap_inner'): + * self._can_create_dummy_thread = False + * elif basename == 'pydev_monkey' and co_name == '__call__': # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): + */ + goto __pyx_L8; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":77 + * elif basename == 'pydev_monkey' and co_name == '__call__': + * self._can_create_dummy_thread = False + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * elif basename == 'pydevd_tracing': + */ + __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_basename, __pyx_n_s_pydevd, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 77, __pyx_L1_error) + if (__pyx_t_7) { + } else { + __pyx_t_1 = __pyx_t_7; + goto __pyx_L17_bool_binop_done; + } + __Pyx_INCREF(__pyx_v_co_name); + __pyx_t_4 = __pyx_v_co_name; + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_run, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 77, __pyx_L1_error) + if (!__pyx_t_6) { + } else { + __pyx_t_7 = __pyx_t_6; + goto __pyx_L19_bool_binop_done; + } + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_main, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 77, __pyx_L1_error) + if (!__pyx_t_6) { + } else { + __pyx_t_7 = __pyx_t_6; + goto __pyx_L19_bool_binop_done; + } + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_t_4, __pyx_n_s_exec, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 77, __pyx_L1_error) + __pyx_t_7 = __pyx_t_6; + __pyx_L19_bool_binop_done:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = (__pyx_t_7 != 0); + __pyx_t_1 = __pyx_t_6; + __pyx_L17_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":78 + * self._can_create_dummy_thread = False + * elif basename == 'pydevd' and co_name in ('run', 
'main', '_exec'): + * self._can_create_dummy_thread = False # <<<<<<<<<<<<<< + * elif basename == 'pydevd_tracing': + * self._can_create_dummy_thread = False + */ + __pyx_v_self->_can_create_dummy_thread = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":77 + * elif basename == 'pydev_monkey' and co_name == '__call__': + * self._can_create_dummy_thread = False + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * elif basename == 'pydevd_tracing': + */ + goto __pyx_L8; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":79 + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): + * self._can_create_dummy_thread = False + * elif basename == 'pydevd_tracing': # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * else: + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_basename, __pyx_n_s_pydevd_tracing, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 79, __pyx_L1_error) + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":80 + * self._can_create_dummy_thread = False + * elif basename == 'pydevd_tracing': + * self._can_create_dummy_thread = False # <<<<<<<<<<<<<< + * else: + * self._can_create_dummy_thread = True + */ + __pyx_v_self->_can_create_dummy_thread = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":79 + * elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): + * self._can_create_dummy_thread = False + * elif basename == 'pydevd_tracing': # <<<<<<<<<<<<<< + * self._can_create_dummy_thread = False + * else: + */ + goto __pyx_L8; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":82 + * self._can_create_dummy_thread = False + * else: + * self._can_create_dummy_thread = True # <<<<<<<<<<<<<< + * + * # print('Can create dummy thread for thread started in: %s %s' % (basename, co_name)) + */ + /*else*/ { + __pyx_v_self->_can_create_dummy_thread = 1; + } + __pyx_L8:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":39 + * cdef public bint force_stay_in_untraced_mode + * + * cdef initialize(self, PyFrameObject * frame_obj): # <<<<<<<<<<<<<< + * # Places that create a ThreadInfo should verify that + * # a current Python frame is being executed! + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.initialize", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_basename); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_j); + __Pyx_XDECREF(__pyx_v_co_name); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":86 + * # print('Can create dummy thread for thread started in: %s %s' % (basename, co_name)) + * + * cdef initialize_if_possible(self): # <<<<<<<<<<<<<< + * # Don't call threading.currentThread because if we're too early in the process + * # we may create a dummy thread. 
+ */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_initialize_if_possible(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_v_thread_ident = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_additional_info = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + int __pyx_t_18; + int __pyx_t_19; + char const *__pyx_t_20; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("initialize_if_possible", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":89 + * # Don't call threading.currentThread because if we're too early in the process + * # we may create a dummy thread. + * self.inside_frame_eval += 1 # <<<<<<<<<<<<<< + * + * try: + */ + __pyx_v_self->inside_frame_eval = (__pyx_v_self->inside_frame_eval + 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":91 + * self.inside_frame_eval += 1 + * + * try: # <<<<<<<<<<<<<< + * thread_ident = _get_ident() + * t = _thread_active.get(thread_ident) + */ + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":92 + * + * try: + * thread_ident = _get_ident() # <<<<<<<<<<<<<< + * t = _thread_active.get(thread_ident) + * if t is None: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_get_ident); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 92, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 92, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_thread_ident = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":93 + * try: + * thread_ident = _get_ident() + * t = _thread_active.get(thread_ident) # <<<<<<<<<<<<<< + * if t is None: + * if self._can_create_dummy_thread: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_thread_active); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 93, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_get); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 93, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_2) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_2, __pyx_v_thread_ident) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_thread_ident); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 93, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_t = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":94 + * thread_ident = _get_ident() + * t = _thread_active.get(thread_ident) + * if t is None: # <<<<<<<<<<<<<< + * if self._can_create_dummy_thread: + * # Initialize the dummy thread and set the tracing (both are needed to + */ + __pyx_t_4 = (__pyx_v_t == Py_None); + __pyx_t_5 = (__pyx_t_4 != 0); + if (__pyx_t_5) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":95 + * t = _thread_active.get(thread_ident) + * if t is None: + * if self._can_create_dummy_thread: # <<<<<<<<<<<<<< + * # Initialize the dummy thread and set the tracing (both are needed to + * # actually stop on breakpoints). + */ + __pyx_t_5 = (__pyx_v_self->_can_create_dummy_thread != 0); + if (__pyx_t_5) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":98 + * # Initialize the dummy thread and set the tracing (both are needed to + * # actually stop on breakpoints). + * t = threading.current_thread() # <<<<<<<<<<<<<< + * SetTrace(dummy_trace_dispatch) + * else: + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_threading); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 98, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_current_thread); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 98, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 98, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF_SET(__pyx_v_t, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":99 + * # actually stop on breakpoints). + * t = threading.current_thread() + * SetTrace(dummy_trace_dispatch) # <<<<<<<<<<<<<< + * else: + * return # Cannot initialize until thread becomes active. + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_SetTrace); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_dummy_trace_dispatch); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 99, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_6, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":95 + * t = _thread_active.get(thread_ident) + * if t is None: + * if self._can_create_dummy_thread: # <<<<<<<<<<<<<< + * # Initialize the dummy thread and set the tracing (both are needed to + * # actually stop on breakpoints). + */ + goto __pyx_L7; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":101 + * SetTrace(dummy_trace_dispatch) + * else: + * return # Cannot initialize until thread becomes active. # <<<<<<<<<<<<<< + * + * if getattr(t, 'is_pydev_daemon_thread', False): + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L3_return; + } + __pyx_L7:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":94 + * thread_ident = _get_ident() + * t = _thread_active.get(thread_ident) + * if t is None: # <<<<<<<<<<<<<< + * if self._can_create_dummy_thread: + * # Initialize the dummy thread and set the tracing (both are needed to + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":103 + * return # Cannot initialize until thread becomes active. + * + * if getattr(t, 'is_pydev_daemon_thread', False): # <<<<<<<<<<<<<< + * self.is_pydevd_thread = True + * self.fully_initialized = True + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_t, __pyx_n_s_is_pydev_daemon_thread, Py_False); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 103, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 103, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_5) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":104 + * + * if getattr(t, 'is_pydev_daemon_thread', False): + * self.is_pydevd_thread = True # <<<<<<<<<<<<<< + * self.fully_initialized = True + * else: + */ + __pyx_v_self->is_pydevd_thread = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":105 + * if getattr(t, 'is_pydev_daemon_thread', False): + * self.is_pydevd_thread = True + * self.fully_initialized = True # <<<<<<<<<<<<<< + * else: + * try: + */ + __pyx_v_self->fully_initialized = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":103 + * return # Cannot initialize until thread becomes active. 
+ * + * if getattr(t, 'is_pydev_daemon_thread', False): # <<<<<<<<<<<<<< + * self.is_pydevd_thread = True + * self.fully_initialized = True + */ + goto __pyx_L8; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":107 + * self.fully_initialized = True + * else: + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + /*else*/ { + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":108 + * else: + * try: + * additional_info = t.additional_info # <<<<<<<<<<<<<< + * if additional_info is None: + * raise AttributeError() + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_t, __pyx_n_s_additional_info); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 108, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_additional_info = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":109 + * try: + * additional_info = t.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + __pyx_t_5 = (__pyx_v_additional_info == Py_None); + __pyx_t_4 = (__pyx_t_5 != 0); + if (unlikely(__pyx_t_4)) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":110 + * additional_info = t.additional_info + * if additional_info is None: + * raise AttributeError() # <<<<<<<<<<<<<< + * except: + * with _set_additional_thread_info_lock: + */ + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_builtin_AttributeError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L9_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 110, __pyx_L9_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":109 + * try: + * additional_info = t.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":107 + * self.fully_initialized = True + * else: + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + } + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L14_try_end; + __pyx_L9_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":111 + * if additional_info is None: + * raise AttributeError() + * except: # <<<<<<<<<<<<<< + * with _set_additional_thread_info_lock: + * # If it's not there, set it within a lock to avoid any racing + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.initialize_if_possible", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3) < 0) __PYX_ERR(0, 111, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_3); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":112 + * raise AttributeError() + * except: + * with _set_additional_thread_info_lock: # <<<<<<<<<<<<<< + * # If it's not there, set it within a lock to avoid any racing + * # conditions. 
+ */ + /*with:*/ { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_set_additional_thread_info_lock); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 112, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_10 = __Pyx_PyObject_LookupSpecial(__pyx_t_6, __pyx_n_s_exit); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 112, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_12 = __Pyx_PyObject_LookupSpecial(__pyx_t_6, __pyx_n_s_enter); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 112, __pyx_L18_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_13)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_13); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + __pyx_t_11 = (__pyx_t_13) ? __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_13) : __Pyx_PyObject_CallNoArg(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 112, __pyx_L18_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + /*try:*/ { + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":115 + * # If it's not there, set it within a lock to avoid any racing + * # conditions. + * additional_info = getattr(thread, 'additional_info', None) # <<<<<<<<<<<<<< + * if additional_info is None: + * additional_info = PyDBAdditionalThreadInfo() + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_thread); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 115, __pyx_L24_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_11 = __Pyx_GetAttr3(__pyx_t_6, __pyx_n_s_additional_info, Py_None); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 115, __pyx_L24_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF_SET(__pyx_v_additional_info, __pyx_t_11); + __pyx_t_11 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":116 + * # conditions. + * additional_info = getattr(thread, 'additional_info', None) + * if additional_info is None: # <<<<<<<<<<<<<< + * additional_info = PyDBAdditionalThreadInfo() + * t.additional_info = additional_info + */ + __pyx_t_4 = (__pyx_v_additional_info == Py_None); + __pyx_t_5 = (__pyx_t_4 != 0); + if (__pyx_t_5) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":117 + * additional_info = getattr(thread, 'additional_info', None) + * if additional_info is None: + * additional_info = PyDBAdditionalThreadInfo() # <<<<<<<<<<<<<< + * t.additional_info = additional_info + * self.additional_info = additional_info + */ + __pyx_t_11 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo)); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 117, __pyx_L24_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF_SET(__pyx_v_additional_info, __pyx_t_11); + __pyx_t_11 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":116 + * # conditions. 
+ * additional_info = getattr(thread, 'additional_info', None) + * if additional_info is None: # <<<<<<<<<<<<<< + * additional_info = PyDBAdditionalThreadInfo() + * t.additional_info = additional_info + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":118 + * if additional_info is None: + * additional_info = PyDBAdditionalThreadInfo() + * t.additional_info = additional_info # <<<<<<<<<<<<<< + * self.additional_info = additional_info + * self.fully_initialized = True + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_t, __pyx_n_s_additional_info, __pyx_v_additional_info) < 0) __PYX_ERR(0, 118, __pyx_L24_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":112 + * raise AttributeError() + * except: + * with _set_additional_thread_info_lock: # <<<<<<<<<<<<<< + * # If it's not there, set it within a lock to avoid any racing + * # conditions. + */ + } + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + goto __pyx_L31_try_end; + __pyx_L24_error:; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.initialize_if_possible", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_11, &__pyx_t_6, &__pyx_t_12) < 0) __PYX_ERR(0, 112, __pyx_L26_except_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = PyTuple_Pack(3, __pyx_t_11, __pyx_t_6, __pyx_t_12); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 112, __pyx_L26_except_error) + __Pyx_GOTREF(__pyx_t_13); + __pyx_t_17 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_13, NULL); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 112, __pyx_L26_except_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_17); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + if (__pyx_t_5 < 0) __PYX_ERR(0, 112, __pyx_L26_except_error) + __pyx_t_4 = ((!(__pyx_t_5 != 0)) != 0); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ErrRestoreWithState(__pyx_t_11, __pyx_t_6, __pyx_t_12); + __pyx_t_11 = 0; __pyx_t_6 = 0; __pyx_t_12 = 0; + __PYX_ERR(0, 112, __pyx_L26_except_error) + } + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L25_exception_handled; + } + __pyx_L26_except_error:; + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + goto __pyx_L11_except_error; + __pyx_L25_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + __pyx_L31_try_end:; + } + } + /*finally:*/ { + /*normal exit:*/{ + if (__pyx_t_10) { + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_tuple__4, NULL); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 112, __pyx_L11_except_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } + goto __pyx_L23; + } + __pyx_L23:; + } + goto __pyx_L36; + __pyx_L18_error:; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L11_except_error; + __pyx_L36:; + } + 
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L10_exception_handled; + } + __pyx_L11_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":107 + * self.fully_initialized = True + * else: + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); + goto __pyx_L4_error; + __pyx_L10_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); + __pyx_L14_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":119 + * additional_info = PyDBAdditionalThreadInfo() + * t.additional_info = additional_info + * self.additional_info = additional_info # <<<<<<<<<<<<<< + * self.fully_initialized = True + * finally: + */ + if (unlikely(!__pyx_v_additional_info)) { __Pyx_RaiseUnboundLocalError("additional_info"); __PYX_ERR(0, 119, __pyx_L4_error) } + if (!(likely(((__pyx_v_additional_info) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_additional_info, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 119, __pyx_L4_error) + __pyx_t_3 = __pyx_v_additional_info; + __Pyx_INCREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->additional_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->additional_info)); + __pyx_v_self->additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":120 + * t.additional_info = additional_info + * self.additional_info = additional_info + * self.fully_initialized = True # <<<<<<<<<<<<<< + * finally: + * self.inside_frame_eval -= 1 + */ + __pyx_v_self->fully_initialized = 1; + } + __pyx_L8:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":122 + * self.fully_initialized = True + * finally: + * self.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * + * + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_v_self->inside_frame_eval = (__pyx_v_self->inside_frame_eval - 1); + goto __pyx_L5; + } + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_9 = 0; __pyx_t_8 = 0; __pyx_t_7 = 0; __pyx_t_10 = 0; __pyx_t_16 = 0; __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_10, &__pyx_t_16, &__pyx_t_15); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_9, &__pyx_t_8, &__pyx_t_7) < 0)) __Pyx_ErrFetch(&__pyx_t_9, &__pyx_t_8, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_15); + __pyx_t_18 = __pyx_lineno; __pyx_t_19 = __pyx_clineno; __pyx_t_20 = __pyx_filename; + { + __pyx_v_self->inside_frame_eval = (__pyx_v_self->inside_frame_eval - 1); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_15); + 
__Pyx_ExceptionReset(__pyx_t_10, __pyx_t_16, __pyx_t_15); + } + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ErrRestore(__pyx_t_9, __pyx_t_8, __pyx_t_7); + __pyx_t_9 = 0; __pyx_t_8 = 0; __pyx_t_7 = 0; __pyx_t_10 = 0; __pyx_t_16 = 0; __pyx_t_15 = 0; + __pyx_lineno = __pyx_t_18; __pyx_clineno = __pyx_t_19; __pyx_filename = __pyx_t_20; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_15 = __pyx_r; + __pyx_r = 0; + __pyx_v_self->inside_frame_eval = (__pyx_v_self->inside_frame_eval - 1); + __pyx_r = __pyx_t_15; + __pyx_t_15 = 0; + goto __pyx_L0; + } + __pyx_L5:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":86 + * # print('Can create dummy thread for thread started in: %s %s' % (basename, co_name)) + * + * cdef initialize_if_possible(self): # <<<<<<<<<<<<<< + * # Don't call threading.currentThread because if we're too early in the process + * # we may create a dummy thread. + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.initialize_if_possible", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_thread_ident); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":26 + * cdef class ThreadInfo: + * + * cdef public PyDBAdditionalThreadInfo additional_info # <<<<<<<<<<<<<< + * cdef public bint is_pydevd_thread + * cdef public int inside_frame_eval + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_self->additional_info)); + __pyx_r = ((PyObject *)__pyx_v_self->additional_info); + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(((__pyx_v_value) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_value, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 26, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->additional_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->additional_info)); + __pyx_v_self->additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.additional_info.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->additional_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->additional_info)); + __pyx_v_self->additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":27 + * + * cdef public PyDBAdditionalThreadInfo additional_info + * cdef public bint is_pydevd_thread # <<<<<<<<<<<<<< + * cdef public int inside_frame_eval + * cdef public bint fully_initialized + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject 
*__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->is_pydevd_thread); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.is_pydevd_thread.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 27, __pyx_L1_error) + __pyx_v_self->is_pydevd_thread = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.is_pydevd_thread.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":28 + * cdef public PyDBAdditionalThreadInfo additional_info + * cdef public bint is_pydevd_thread + * cdef public int inside_frame_eval # <<<<<<<<<<<<<< + * cdef public bint fully_initialized + * cdef public object thread_trace_func + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->inside_frame_eval); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.inside_frame_eval.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 28, __pyx_L1_error) + __pyx_v_self->inside_frame_eval = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.inside_frame_eval.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":29 + * cdef public bint is_pydevd_thread + * cdef public int inside_frame_eval + * cdef public bint fully_initialized # <<<<<<<<<<<<<< + * cdef public 
object thread_trace_func + * cdef bint _can_create_dummy_thread + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->fully_initialized); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.fully_initialized.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 29, __pyx_L1_error) + __pyx_v_self->fully_initialized = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.fully_initialized.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":30 + * cdef public int 
inside_frame_eval + * cdef public bint fully_initialized + * cdef public object thread_trace_func # <<<<<<<<<<<<<< + * cdef bint _can_create_dummy_thread + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->thread_trace_func); + __pyx_r = __pyx_v_self->thread_trace_func; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->thread_trace_func); + __Pyx_DECREF(__pyx_v_self->thread_trace_func); + __pyx_v_self->thread_trace_func = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->thread_trace_func); + __Pyx_DECREF(__pyx_v_self->thread_trace_func); + __pyx_v_self->thread_trace_func = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":37 + * # If True the debugger should not go into trace mode even if the new + * # code for a function is None and there are breakpoints. + * cdef public bint force_stay_in_untraced_mode # <<<<<<<<<<<<<< + * + * cdef initialize(self, PyFrameObject * frame_obj): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->force_stay_in_untraced_mode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.force_stay_in_untraced_mode.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 37, __pyx_L1_error) + __pyx_v_self->force_stay_in_untraced_mode = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.force_stay_in_untraced_mode.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo___reduce_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo___reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._can_create_dummy_thread, self.additional_info, self.force_stay_in_untraced_mode, self.fully_initialized, self.inside_frame_eval, self.is_pydevd_thread, self.thread_trace_func) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_can_create_dummy_thread); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_self->force_stay_in_untraced_mode); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->fully_initialized); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_self->inside_frame_eval); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = 
__Pyx_PyBool_FromLong(__pyx_v_self->is_pydevd_thread); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyTuple_New(7); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->additional_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->additional_info)); + PyTuple_SET_ITEM(__pyx_t_6, 1, ((PyObject *)__pyx_v_self->additional_info)); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_6, 3, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 4, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_6, 5, __pyx_t_5); + __Pyx_INCREF(__pyx_v_self->thread_trace_func); + __Pyx_GIVEREF(__pyx_v_self->thread_trace_func); + PyTuple_SET_ITEM(__pyx_t_6, 6, __pyx_v_self->thread_trace_func); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_3 = 0; + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_6); + __pyx_t_6 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._can_create_dummy_thread, self.additional_info, self.force_stay_in_untraced_mode, self.fully_initialized, self.inside_frame_eval, self.is_pydevd_thread, self.thread_trace_func) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_6 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_v__dict = __pyx_t_6; + __pyx_t_6 = 0; + + /* "(tree fragment)":7 + * state = (self._can_create_dummy_thread, self.additional_info, self.force_stay_in_untraced_mode, self.fully_initialized, self.inside_frame_eval, self.is_pydevd_thread, self.thread_trace_func) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_7 = (__pyx_v__dict != Py_None); + __pyx_t_8 = (__pyx_t_7 != 0); + if (__pyx_t_8) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v__dict); + __pyx_t_5 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.additional_info is not None or self.thread_trace_func is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._can_create_dummy_thread, self.additional_info, self.force_stay_in_untraced_mode, self.fully_initialized, self.inside_frame_eval, self.is_pydevd_thread, self.thread_trace_func) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = 
self.additional_info is not None or self.thread_trace_func is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, None), state + */ + /*else*/ { + __pyx_t_7 = (((PyObject *)__pyx_v_self->additional_info) != Py_None); + __pyx_t_9 = (__pyx_t_7 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_8 = __pyx_t_9; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_self->thread_trace_func != Py_None); + __pyx_t_7 = (__pyx_t_9 != 0); + __pyx_t_8 = __pyx_t_7; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_8; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.additional_info is not None or self.thread_trace_func is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, None), state + * else: + */ + __pyx_t_8 = (__pyx_v_use_setstate != 0); + if (__pyx_t_8) { + + /* "(tree fragment)":13 + * use_setstate = self.additional_info is not None or self.thread_trace_func is not None + * if use_setstate: + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_ThreadInfo); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_11485321); + __Pyx_GIVEREF(__pyx_int_11485321); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_int_11485321); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_6, 2, Py_None); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_6); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state); + __pyx_t_5 = 0; + __pyx_t_6 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.additional_info is not None or self.thread_trace_func is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, None), state + * else: + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ThreadInfo__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_ThreadInfo); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + 
__Pyx_INCREF(__pyx_int_11485321); + __Pyx_GIVEREF(__pyx_int_11485321); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_int_11485321); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadInfo__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_2__setstate_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_2__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ThreadInfo__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_ThreadInfo__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return 
__pyx_unpickle_ThreadInfo, (type(self), 0x0af4089, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadInfo__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":139 + * cdef public int breakpoints_mtime + * + * def __init__(self): # <<<<<<<<<<<<<< + * self.co_filename = '' + * self.canonical_normalized_filename = '' + */ + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 0))) return -1; + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo___init__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo___init__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":140 + * + * def __init__(self): + * self.co_filename = '' # <<<<<<<<<<<<<< + * self.canonical_normalized_filename = '' + * self.always_skip_code = False + */ + __Pyx_INCREF(__pyx_kp_s__5); + __Pyx_GIVEREF(__pyx_kp_s__5); + __Pyx_GOTREF(__pyx_v_self->co_filename); + __Pyx_DECREF(__pyx_v_self->co_filename); + __pyx_v_self->co_filename = __pyx_kp_s__5; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":141 + * def __init__(self): + * self.co_filename = '' + * self.canonical_normalized_filename = '' # <<<<<<<<<<<<<< + * self.always_skip_code = False + * + */ + __Pyx_INCREF(__pyx_kp_s__5); + __Pyx_GIVEREF(__pyx_kp_s__5); + __Pyx_GOTREF(__pyx_v_self->canonical_normalized_filename); + __Pyx_DECREF(__pyx_v_self->canonical_normalized_filename); + __pyx_v_self->canonical_normalized_filename = __pyx_kp_s__5; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":142 + * self.co_filename = '' + * self.canonical_normalized_filename = '' + * self.always_skip_code = False # <<<<<<<<<<<<<< + * + * # If breakpoints are found but new_code is None, + */ + __pyx_v_self->always_skip_code = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":147 + * # this means we weren't able to actually add the code + * # where needed, so, fallback to tracing. 
+ * self.breakpoint_found = False # <<<<<<<<<<<<<< + * self.new_code = None + * self.breakpoints_mtime = -1 + */ + __pyx_v_self->breakpoint_found = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":148 + * # where needed, so, fallback to tracing. + * self.breakpoint_found = False + * self.new_code = None # <<<<<<<<<<<<<< + * self.breakpoints_mtime = -1 + * + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->new_code); + __Pyx_DECREF(__pyx_v_self->new_code); + __pyx_v_self->new_code = Py_None; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":149 + * self.breakpoint_found = False + * self.new_code = None + * self.breakpoints_mtime = -1 # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_self->breakpoints_mtime = -1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":139 + * cdef public int breakpoints_mtime + * + * def __init__(self): # <<<<<<<<<<<<<< + * self.co_filename = '' + * self.canonical_normalized_filename = '' + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":127 + * cdef class FuncCodeInfo: + * + * cdef public str co_filename # <<<<<<<<<<<<<< + * cdef public str co_name + * cdef public str canonical_normalized_filename + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->co_filename); + __pyx_r = __pyx_v_self->co_filename; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + 
__Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 127, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->co_filename); + __Pyx_DECREF(__pyx_v_self->co_filename); + __pyx_v_self->co_filename = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.co_filename.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->co_filename); + __Pyx_DECREF(__pyx_v_self->co_filename); + __pyx_v_self->co_filename = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":128 + * + * cdef public str co_filename + * cdef public str co_name # <<<<<<<<<<<<<< + * cdef public str canonical_normalized_filename + * cdef bint always_skip_code + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->co_name); + __pyx_r = __pyx_v_self->co_name; + goto 
__pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 128, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->co_name); + __Pyx_DECREF(__pyx_v_self->co_name); + __pyx_v_self->co_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.co_name.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->co_name); + __Pyx_DECREF(__pyx_v_self->co_name); + __pyx_v_self->co_name = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":129 + * cdef public str co_filename + * cdef public str co_name + * cdef public str canonical_normalized_filename # <<<<<<<<<<<<<< + * cdef bint always_skip_code + * cdef public bint 
breakpoint_found + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->canonical_normalized_filename); + __pyx_r = __pyx_v_self->canonical_normalized_filename; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 129, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->canonical_normalized_filename); + __Pyx_DECREF(__pyx_v_self->canonical_normalized_filename); + __pyx_v_self->canonical_normalized_filename = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.canonical_normalized_filename.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->canonical_normalized_filename); + __Pyx_DECREF(__pyx_v_self->canonical_normalized_filename); + __pyx_v_self->canonical_normalized_filename = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":131 + * cdef public str canonical_normalized_filename + * cdef bint always_skip_code + * cdef public bint breakpoint_found # <<<<<<<<<<<<<< + * cdef public object new_code + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->breakpoint_found); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.breakpoint_found.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L1_error) + __pyx_v_self->breakpoint_found = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.breakpoint_found.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":132 + * cdef bint always_skip_code + * cdef public bint breakpoint_found + * cdef public object new_code # <<<<<<<<<<<<<< + * + * # When breakpoints_mtime != PyDb.mtime the validity of breakpoints have + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->new_code); + __pyx_r = __pyx_v_self->new_code; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_2__set__(((struct 
__pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->new_code); + __Pyx_DECREF(__pyx_v_self->new_code); + __pyx_v_self->new_code = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->new_code); + __Pyx_DECREF(__pyx_v_self->new_code); + __pyx_v_self->new_code = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":137 + * # to be re-evaluated (if invalid a new FuncCodeInfo must be created and + * # tracing can't be disabled for the related frames). 
+ * cdef public int breakpoints_mtime # <<<<<<<<<<<<<< + * + * def __init__(self): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->breakpoints_mtime); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.breakpoints_mtime.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 137, __pyx_L1_error) + __pyx_v_self->breakpoints_mtime = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.breakpoints_mtime.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def 
__reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_2__reduce_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_2__reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.always_skip_code, self.breakpoint_found, self.breakpoints_mtime, self.canonical_normalized_filename, self.co_filename, self.co_name, self.new_code) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->always_skip_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_self->breakpoint_found); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->breakpoints_mtime); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(7); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3); + __Pyx_INCREF(__pyx_v_self->canonical_normalized_filename); + __Pyx_GIVEREF(__pyx_v_self->canonical_normalized_filename); + PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_self->canonical_normalized_filename); + __Pyx_INCREF(__pyx_v_self->co_filename); + __Pyx_GIVEREF(__pyx_v_self->co_filename); + PyTuple_SET_ITEM(__pyx_t_4, 4, __pyx_v_self->co_filename); + __Pyx_INCREF(__pyx_v_self->co_name); + __Pyx_GIVEREF(__pyx_v_self->co_name); + PyTuple_SET_ITEM(__pyx_t_4, 5, __pyx_v_self->co_name); + __Pyx_INCREF(__pyx_v_self->new_code); + __Pyx_GIVEREF(__pyx_v_self->new_code); + PyTuple_SET_ITEM(__pyx_t_4, 6, __pyx_v_self->new_code); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_3 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.always_skip_code, self.breakpoint_found, self.breakpoints_mtime, self.canonical_normalized_filename, 
self.co_filename, self.co_name, self.new_code) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_4 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v__dict = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":7 + * state = (self.always_skip_code, self.breakpoint_found, self.breakpoints_mtime, self.canonical_normalized_filename, self.co_filename, self.co_name, self.new_code) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_5 = (__pyx_v__dict != Py_None); + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v__dict); + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.canonical_normalized_filename is not None or self.co_filename is not None or self.co_name is not None or self.new_code is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.always_skip_code, self.breakpoint_found, self.breakpoints_mtime, self.canonical_normalized_filename, self.co_filename, self.co_name, self.new_code) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.canonical_normalized_filename is not None or self.co_filename is not None or self.co_name is not None or self.new_code is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, None), state + */ + /*else*/ { + __pyx_t_5 = (__pyx_v_self->canonical_normalized_filename != ((PyObject*)Py_None)); + __pyx_t_7 = (__pyx_t_5 != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_7 = (__pyx_v_self->co_filename != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_7 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_6 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->co_name != ((PyObject*)Py_None)); + __pyx_t_7 = (__pyx_t_5 != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_7 = (__pyx_v_self->new_code != Py_None); + __pyx_t_5 = (__pyx_t_7 != 0); + __pyx_t_6 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_6; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.canonical_normalized_filename is not None or self.co_filename is not None or self.co_name is not None or self.new_code is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_FuncCodeInfo, (type(self), 
0xb3ee05d, None), state + * else: + */ + __pyx_t_6 = (__pyx_v_use_setstate != 0); + if (__pyx_t_6) { + + /* "(tree fragment)":13 + * use_setstate = self.canonical_normalized_filename is not None or self.co_filename is not None or self.co_name is not None or self.new_code is not None + * if use_setstate: + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_FuncCodeInfo); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_188670045); + __Pyx_GIVEREF(__pyx_int_188670045); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_int_188670045); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_4, 2, Py_None); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); + __pyx_t_3 = 0; + __pyx_t_4 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.canonical_normalized_filename is not None or self.co_filename is not None or self.co_name is not None or self.new_code is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, None), state + * else: + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_FuncCodeInfo__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_pyx_unpickle_FuncCodeInfo); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_188670045); + __Pyx_GIVEREF(__pyx_int_188670045); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_int_188670045); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4); + __pyx_t_2 = 0; + __pyx_t_4 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # 
<<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_FuncCodeInfo__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_4__setstate_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_4__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_FuncCodeInfo__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_FuncCodeInfo__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_FuncCodeInfo, (type(self), 0xb3ee05d, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_FuncCodeInfo__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":152 + * + * + * def dummy_trace_dispatch(frame, str event, 
arg): # <<<<<<<<<<<<<< + * if event == 'call': + * if frame.f_trace is not None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_3dummy_trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_3dummy_trace_dispatch = {"dummy_trace_dispatch", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_3dummy_trace_dispatch, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_3dummy_trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("dummy_trace_dispatch (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("dummy_trace_dispatch", 1, 3, 3, 1); __PYX_ERR(0, 152, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("dummy_trace_dispatch", 1, 3, 3, 2); __PYX_ERR(0, 152, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "dummy_trace_dispatch") < 0)) __PYX_ERR(0, 152, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("dummy_trace_dispatch", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 152, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.dummy_trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 152, __pyx_L1_error) + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_2dummy_trace_dispatch(__pyx_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + goto 
__pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_2dummy_trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("dummy_trace_dispatch", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":153 + * + * def dummy_trace_dispatch(frame, str event, arg): + * if event == 'call': # <<<<<<<<<<<<<< + * if frame.f_trace is not None: + * return frame.f_trace(frame, event, arg) + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call_2, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 153, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":154 + * def dummy_trace_dispatch(frame, str event, arg): + * if event == 'call': + * if frame.f_trace is not None: # <<<<<<<<<<<<<< + * return frame.f_trace(frame, event, arg) + * return None + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 154, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = (__pyx_t_3 != Py_None); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":155 + * if event == 'call': + * if frame.f_trace is not None: + * return frame.f_trace(frame, event, arg) # <<<<<<<<<<<<<< + * return None + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_7 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, 
__pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_6, __pyx_v_arg); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":154 + * def dummy_trace_dispatch(frame, str event, arg): + * if event == 'call': + * if frame.f_trace is not None: # <<<<<<<<<<<<<< + * return frame.f_trace(frame, event, arg) + * return None + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":153 + * + * def dummy_trace_dispatch(frame, str event, arg): + * if event == 'call': # <<<<<<<<<<<<<< + * if frame.f_trace is not None: + * return frame.f_trace(frame, event, arg) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":156 + * if frame.f_trace is not None: + * return frame.f_trace(frame, event, arg) + * return None # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":152 + * + * + * def dummy_trace_dispatch(frame, str event, arg): # <<<<<<<<<<<<<< + * if event == 'call': + * if frame.f_trace is not None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.dummy_trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":159 + * + * + * def get_thread_info_py() -> ThreadInfo: # <<<<<<<<<<<<<< + * return get_thread_info(PyEval_GetFrame()) + * + */ + +/* Python wrapper */ +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5get_thread_info_py(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_5get_thread_info_py = {"get_thread_info_py", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5get_thread_info_py, METH_NOARGS, 0}; +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5get_thread_info_py(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_thread_info_py (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_4get_thread_info_py(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_4get_thread_info_py(CYTHON_UNUSED PyObject *__pyx_self) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char 
*__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_thread_info_py", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":160 + * + * def get_thread_info_py() -> ThreadInfo: + * return get_thread_info(PyEval_GetFrame()) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(((PyObject *)__pyx_r)); + __pyx_t_1 = ((PyObject *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_thread_info(PyEval_GetFrame())); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":159 + * + * + * def get_thread_info_py() -> ThreadInfo: # <<<<<<<<<<<<<< + * return get_thread_info(PyEval_GetFrame()) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_thread_info_py", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":163 + * + * + * cdef ThreadInfo get_thread_info(PyFrameObject * frame_obj): # <<<<<<<<<<<<<< + * ''' + * Provides thread-related info. + */ + +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_thread_info(PyFrameObject *__pyx_v_frame_obj) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_thread_info = 0; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + char const *__pyx_t_11; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_thread_info", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":170 + * ''' + * cdef ThreadInfo thread_info + * try: # <<<<<<<<<<<<<< + * # Note: changing to a `dict[thread.ident] = thread_info` had almost no + * # effect in the performance. + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":173 + * # Note: changing to a `dict[thread.ident] = thread_info` had almost no + * # effect in the performance. 
+ * thread_info = _thread_local_info.thread_info # <<<<<<<<<<<<<< + * except: + * if frame_obj == NULL: + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_thread_local_info); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 173, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_thread_info); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 173, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!(likely(((__pyx_t_5) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_5, __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo))))) __PYX_ERR(0, 173, __pyx_L3_error) + __pyx_v_thread_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":170 + * ''' + * cdef ThreadInfo thread_info + * try: # <<<<<<<<<<<<<< + * # Note: changing to a `dict[thread.ident] = thread_info` had almost no + * # effect in the performance. + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":174 + * # effect in the performance. + * thread_info = _thread_local_info.thread_info + * except: # <<<<<<<<<<<<<< + * if frame_obj == NULL: + * return None + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_thread_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_4, &__pyx_t_6) < 0) __PYX_ERR(0, 174, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_6); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":175 + * thread_info = _thread_local_info.thread_info + * except: + * if frame_obj == NULL: # <<<<<<<<<<<<<< + * return None + * thread_info = ThreadInfo() + */ + __pyx_t_7 = ((__pyx_v_frame_obj == NULL) != 0); + if (__pyx_t_7) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":176 + * except: + * if frame_obj == NULL: + * return None # <<<<<<<<<<<<<< + * thread_info = ThreadInfo() + * thread_info.initialize(frame_obj) + */ + __Pyx_XDECREF(((PyObject *)__pyx_r)); + __pyx_r = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)Py_None); __Pyx_INCREF(Py_None); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L6_except_return; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":175 + * thread_info = _thread_local_info.thread_info + * except: + * if frame_obj == NULL: # <<<<<<<<<<<<<< + * return None + * thread_info = ThreadInfo() + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":177 + * if frame_obj == NULL: + * return None + * thread_info = ThreadInfo() # <<<<<<<<<<<<<< + * thread_info.initialize(frame_obj) + * thread_info.inside_frame_eval += 1 + */ + __pyx_t_8 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo)); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 177, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_XDECREF_SET(__pyx_v_thread_info, ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_8)); + __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":178 + * return None + * thread_info = 
ThreadInfo() + * thread_info.initialize(frame_obj) # <<<<<<<<<<<<<< + * thread_info.inside_frame_eval += 1 + * try: + */ + __pyx_t_8 = ((struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_thread_info->__pyx_vtab)->initialize(__pyx_v_thread_info, __pyx_v_frame_obj); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 178, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":179 + * thread_info = ThreadInfo() + * thread_info.initialize(frame_obj) + * thread_info.inside_frame_eval += 1 # <<<<<<<<<<<<<< + * try: + * _thread_local_info.thread_info = thread_info + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval + 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":180 + * thread_info.initialize(frame_obj) + * thread_info.inside_frame_eval += 1 + * try: # <<<<<<<<<<<<<< + * _thread_local_info.thread_info = thread_info + * + */ + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":181 + * thread_info.inside_frame_eval += 1 + * try: + * _thread_local_info.thread_info = thread_info # <<<<<<<<<<<<<< + * + * # Note: _code_extra_index is not actually thread-related, + */ + __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_thread_local_info); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 181, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_8); + if (__Pyx_PyObject_SetAttrStr(__pyx_t_8, __pyx_n_s_thread_info, ((PyObject *)__pyx_v_thread_info)) < 0) __PYX_ERR(0, 181, __pyx_L15_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":186 + * # but this is a good point to initialize it. + * global _code_extra_index + * if _code_extra_index == -1: # <<<<<<<<<<<<<< + * _code_extra_index = _PyEval_RequestCodeExtraIndex(release_co_extra) + * + */ + __pyx_t_7 = ((__pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator__code_extra_index == -1L) != 0); + if (__pyx_t_7) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":187 + * global _code_extra_index + * if _code_extra_index == -1: + * _code_extra_index = _PyEval_RequestCodeExtraIndex(release_co_extra) # <<<<<<<<<<<<<< + * + * thread_info.initialize_if_possible() + */ + __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator__code_extra_index = ((int)_PyEval_RequestCodeExtraIndex(release_co_extra)); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":186 + * # but this is a good point to initialize it. 
+ * global _code_extra_index + * if _code_extra_index == -1: # <<<<<<<<<<<<<< + * _code_extra_index = _PyEval_RequestCodeExtraIndex(release_co_extra) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":189 + * _code_extra_index = _PyEval_RequestCodeExtraIndex(release_co_extra) + * + * thread_info.initialize_if_possible() # <<<<<<<<<<<<<< + * finally: + * thread_info.inside_frame_eval -= 1 + */ + __pyx_t_8 = ((struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_thread_info->__pyx_vtab)->initialize_if_possible(__pyx_v_thread_info); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 189, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":191 + * thread_info.initialize_if_possible() + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * + * return thread_info + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + goto __pyx_L16; + } + __pyx_L15_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0)) __Pyx_ErrFetch(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_9 = __pyx_lineno; __pyx_t_10 = __pyx_clineno; __pyx_t_11 = __pyx_filename; + { + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_ErrRestore(__pyx_t_12, __pyx_t_13, __pyx_t_14); + __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_9; __pyx_clineno = __pyx_t_10; __pyx_filename = __pyx_t_11; + goto __pyx_L5_except_error; + } + __pyx_L16:; + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L4_exception_handled; + } + __pyx_L5_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":170 + * ''' + * cdef ThreadInfo thread_info + * try: # <<<<<<<<<<<<<< + * # Note: changing to a `dict[thread.ident] = thread_info` had almost no + * # effect in the performance. 
+ */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L0; + __pyx_L4_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L8_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":193 + * thread_info.inside_frame_eval -= 1 + * + * return thread_info # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(((PyObject *)__pyx_r)); + __Pyx_INCREF(((PyObject *)__pyx_v_thread_info)); + __pyx_r = __pyx_v_thread_info; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":163 + * + * + * cdef ThreadInfo get_thread_info(PyFrameObject * frame_obj): # <<<<<<<<<<<<<< + * ''' + * Provides thread-related info. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_thread_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_thread_info); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":196 + * + * + * def decref_py(obj): # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_7decref_py(PyObject *__pyx_self, PyObject *__pyx_v_obj); /*proto*/ +static char __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_6decref_py[] = "\n Helper to be called from Python.\n "; +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_7decref_py = {"decref_py", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_7decref_py, METH_O, __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_6decref_py}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_7decref_py(PyObject *__pyx_self, PyObject *__pyx_v_obj) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("decref_py (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_6decref_py(__pyx_self, ((PyObject *)__pyx_v_obj)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_6decref_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("decref_py", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":200 + * Helper to be called from Python. + * ''' + * Py_DECREF(obj) # <<<<<<<<<<<<<< + * + * + */ + Py_DECREF(__pyx_v_obj); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":196 + * + * + * def decref_py(obj): # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. 
+ */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":203 + * + * + * def get_func_code_info_py(thread_info, frame, code_obj) -> FuncCodeInfo: # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. + */ + +/* Python wrapper */ +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_9get_func_code_info_py(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_8get_func_code_info_py[] = "\n Helper to be called from Python.\n "; +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_9get_func_code_info_py = {"get_func_code_info_py", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_9get_func_code_info_py, METH_VARARGS|METH_KEYWORDS, __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_8get_func_code_info_py}; +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_9get_func_code_info_py(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_thread_info = 0; + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_code_obj = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_func_code_info_py (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_thread_info,&__pyx_n_s_frame,&__pyx_n_s_code_obj,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_thread_info)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_func_code_info_py", 1, 3, 3, 1); __PYX_ERR(0, 203, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_code_obj)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_func_code_info_py", 1, 3, 3, 2); __PYX_ERR(0, 203, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "get_func_code_info_py") < 0)) __PYX_ERR(0, 203, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_thread_info = values[0]; + __pyx_v_frame = values[1]; + __pyx_v_code_obj = values[2]; + } + goto 
__pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("get_func_code_info_py", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 203, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_func_code_info_py", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_8get_func_code_info_py(__pyx_self, __pyx_v_thread_info, __pyx_v_frame, __pyx_v_code_obj); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_8get_func_code_info_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_thread_info, PyObject *__pyx_v_frame, PyObject *__pyx_v_code_obj) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_func_code_info_py", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":207 + * Helper to be called from Python. + * ''' + * return get_func_code_info( thread_info, frame, code_obj) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(((PyObject *)__pyx_r)); + __pyx_t_1 = ((PyObject *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_func_code_info(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_thread_info), ((PyFrameObject *)__pyx_v_frame), ((PyCodeObject *)__pyx_v_code_obj))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 207, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":203 + * + * + * def get_func_code_info_py(thread_info, frame, code_obj) -> FuncCodeInfo: # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_func_code_info_py", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":212 + * cdef int _code_extra_index = -1 + * + * cdef FuncCodeInfo get_func_code_info(ThreadInfo thread_info, PyFrameObject * frame_obj, PyCodeObject * code_obj): # <<<<<<<<<<<<<< + * ''' + * Provides code-object related info. 
+ */ + +static struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_func_code_info(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_thread_info, PyFrameObject *__pyx_v_frame_obj, PyCodeObject *__pyx_v_code_obj) { + PyObject *__pyx_v_main_debugger = 0; + PyObject *__pyx_v_extra; + PyObject *__pyx_v_extra_obj; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_func_code_info_obj = NULL; + PyObject *__pyx_v_co_filename = 0; + PyObject *__pyx_v_co_name = 0; + PyObject *__pyx_v_cache_file_type = 0; + PyObject *__pyx_v_cache_file_type_key = 0; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_func_code_info = NULL; + PyObject *__pyx_v_abs_path_real_path_and_base = NULL; + PyObject *__pyx_v_file_type = NULL; + PyObject *__pyx_v_breakpoints = 0; + PyObject *__pyx_v_function_breakpoint = 0; + PyObject *__pyx_v_code_obj_py = 0; + PyObject *__pyx_v_cached_code_obj_info = 0; + PyObject *__pyx_v_breakpoint_found = NULL; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + PyObject *(*__pyx_t_14)(PyObject *); + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_func_code_info", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":228 + * # print('get_func_code_info', f_code.co_name, f_code.co_filename) + * + * cdef object main_debugger = GlobalDebuggerHolder.global_dbg # <<<<<<<<<<<<<< + * thread_info.force_stay_in_untraced_mode = False # This is an output value of the function. + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_GlobalDebuggerHolder); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_global_dbg); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_main_debugger = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":229 + * + * cdef object main_debugger = GlobalDebuggerHolder.global_dbg + * thread_info.force_stay_in_untraced_mode = False # This is an output value of the function. 
# <<<<<<<<<<<<<< + * + * cdef PyObject * extra + */ + __pyx_v_thread_info->force_stay_in_untraced_mode = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":232 + * + * cdef PyObject * extra + * _PyCode_GetExtra( code_obj, _code_extra_index, & extra) # <<<<<<<<<<<<<< + * if extra is not NULL: + * extra_obj = extra + */ + (void)(_PyCode_GetExtra(((PyObject *)__pyx_v_code_obj), __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator__code_extra_index, (&__pyx_v_extra))); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":233 + * cdef PyObject * extra + * _PyCode_GetExtra( code_obj, _code_extra_index, & extra) + * if extra is not NULL: # <<<<<<<<<<<<<< + * extra_obj = extra + * if extra_obj is not NULL: + */ + __pyx_t_3 = ((__pyx_v_extra != NULL) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":234 + * _PyCode_GetExtra( code_obj, _code_extra_index, & extra) + * if extra is not NULL: + * extra_obj = extra # <<<<<<<<<<<<<< + * if extra_obj is not NULL: + * func_code_info_obj = extra_obj + */ + __pyx_v_extra_obj = ((PyObject *)__pyx_v_extra); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":235 + * if extra is not NULL: + * extra_obj = extra + * if extra_obj is not NULL: # <<<<<<<<<<<<<< + * func_code_info_obj = extra_obj + * if func_code_info_obj.breakpoints_mtime == main_debugger.mtime: + */ + __pyx_t_3 = ((__pyx_v_extra_obj != NULL) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":236 + * extra_obj = extra + * if extra_obj is not NULL: + * func_code_info_obj = extra_obj # <<<<<<<<<<<<<< + * if func_code_info_obj.breakpoints_mtime == main_debugger.mtime: + * # if DEBUG: + */ + __pyx_t_2 = ((PyObject *)__pyx_v_extra_obj); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_func_code_info_obj = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_t_2); + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":237 + * if extra_obj is not NULL: + * func_code_info_obj = extra_obj + * if func_code_info_obj.breakpoints_mtime == main_debugger.mtime: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_func_code_info: matched mtime', f_code.co_name, f_code.co_filename) + */ + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_func_code_info_obj->breakpoints_mtime); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 237, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_mtime); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 237, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_2, __pyx_t_1, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 237, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 237, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":241 + * # print('get_func_code_info: matched mtime', f_code.co_name, f_code.co_filename) + * + * return func_code_info_obj # <<<<<<<<<<<<<< + * + * cdef str co_filename = code_obj.co_filename + */ + __Pyx_XDECREF(((PyObject *)__pyx_r)); + __Pyx_INCREF(((PyObject *)__pyx_v_func_code_info_obj)); + __pyx_r = __pyx_v_func_code_info_obj; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":237 + * if extra_obj is not NULL: + * func_code_info_obj = extra_obj + * if func_code_info_obj.breakpoints_mtime == main_debugger.mtime: # 
<<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_func_code_info: matched mtime', f_code.co_name, f_code.co_filename) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":235 + * if extra is not NULL: + * extra_obj = extra + * if extra_obj is not NULL: # <<<<<<<<<<<<<< + * func_code_info_obj = extra_obj + * if func_code_info_obj.breakpoints_mtime == main_debugger.mtime: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":233 + * cdef PyObject * extra + * _PyCode_GetExtra( code_obj, _code_extra_index, & extra) + * if extra is not NULL: # <<<<<<<<<<<<<< + * extra_obj = extra + * if extra_obj is not NULL: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":243 + * return func_code_info_obj + * + * cdef str co_filename = code_obj.co_filename # <<<<<<<<<<<<<< + * cdef str co_name = code_obj.co_name + * cdef dict cache_file_type + */ + __pyx_t_4 = ((PyObject *)__pyx_v_code_obj->co_filename); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_co_filename = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":244 + * + * cdef str co_filename = code_obj.co_filename + * cdef str co_name = code_obj.co_name # <<<<<<<<<<<<<< + * cdef dict cache_file_type + * cdef tuple cache_file_type_key + */ + __pyx_t_4 = ((PyObject *)__pyx_v_code_obj->co_name); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_co_name = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":248 + * cdef tuple cache_file_type_key + * + * func_code_info = FuncCodeInfo() # <<<<<<<<<<<<<< + * func_code_info.breakpoints_mtime = main_debugger.mtime + * + */ + __pyx_t_4 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 248, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_func_code_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":249 + * + * func_code_info = FuncCodeInfo() + * func_code_info.breakpoints_mtime = main_debugger.mtime # <<<<<<<<<<<<<< + * + * func_code_info.co_filename = co_filename + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_mtime); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 249, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_4); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 249, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_func_code_info->breakpoints_mtime = __pyx_t_5; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":251 + * func_code_info.breakpoints_mtime = main_debugger.mtime + * + * func_code_info.co_filename = co_filename # <<<<<<<<<<<<<< + * func_code_info.co_name = co_name + * + */ + __Pyx_INCREF(__pyx_v_co_filename); + __Pyx_GIVEREF(__pyx_v_co_filename); + __Pyx_GOTREF(__pyx_v_func_code_info->co_filename); + __Pyx_DECREF(__pyx_v_func_code_info->co_filename); + __pyx_v_func_code_info->co_filename = __pyx_v_co_filename; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":252 + * + * func_code_info.co_filename = co_filename + * func_code_info.co_name = co_name # <<<<<<<<<<<<<< + * + * if not func_code_info.always_skip_code: + */ + __Pyx_INCREF(__pyx_v_co_name); + __Pyx_GIVEREF(__pyx_v_co_name); + __Pyx_GOTREF(__pyx_v_func_code_info->co_name); + __Pyx_DECREF(__pyx_v_func_code_info->co_name); + __pyx_v_func_code_info->co_name = __pyx_v_co_name; + + /* 
"_pydevd_frame_eval/pydevd_frame_evaluator.pyx":254 + * func_code_info.co_name = co_name + * + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + */ + __pyx_t_3 = ((!(__pyx_v_func_code_info->always_skip_code != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":255 + * + * if not func_code_info.always_skip_code: + * try: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_6, &__pyx_t_7, &__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":256 + * if not func_code_info.always_skip_code: + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] # <<<<<<<<<<<<<< + * except: + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame_obj) + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 256, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_Dict_GetItem(__pyx_t_4, __pyx_v_co_filename); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 256, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_abs_path_real_path_and_base = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":255 + * + * if not func_code_info.always_skip_code: + * try: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + * except: + */ + } + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L12_try_end; + __pyx_L7_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":257 + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + * except: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame_obj) + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_func_code_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_4, &__pyx_t_2) < 0) __PYX_ERR(0, 257, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_2); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":258 + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + * except: + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame_obj) # <<<<<<<<<<<<<< + * + * func_code_info.canonical_normalized_filename = abs_path_real_path_and_base[1] + */ + __Pyx_GetModuleGlobalName(__pyx_t_10, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 258, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + __pyx_t_9 = (__pyx_t_11) ? 
__Pyx_PyObject_Call2Args(__pyx_t_10, __pyx_t_11, ((PyObject *)__pyx_v_frame_obj)) : __Pyx_PyObject_CallOneArg(__pyx_t_10, ((PyObject *)__pyx_v_frame_obj)); + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 258, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_abs_path_real_path_and_base, __pyx_t_9); + __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L8_exception_handled; + } + __pyx_L9_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":255 + * + * if not func_code_info.always_skip_code: + * try: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + * except: + */ + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); + goto __pyx_L1_error; + __pyx_L8_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); + __pyx_L12_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":260 + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame_obj) + * + * func_code_info.canonical_normalized_filename = abs_path_real_path_and_base[1] # <<<<<<<<<<<<<< + * + * cache_file_type = main_debugger.get_cache_file_type() + */ + __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_abs_path_real_path_and_base, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 260, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (!(likely(PyString_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_2)->tp_name), 0))) __PYX_ERR(0, 260, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_func_code_info->canonical_normalized_filename); + __Pyx_DECREF(__pyx_v_func_code_info->canonical_normalized_filename); + __pyx_v_func_code_info->canonical_normalized_filename = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":262 + * func_code_info.canonical_normalized_filename = abs_path_real_path_and_base[1] + * + * cache_file_type = main_debugger.get_cache_file_type() # <<<<<<<<<<<<<< + * # Note: this cache key must be the same from PyDB.get_file_type() -- see it for comments + * # on the cache. + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_get_cache_file_type); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 262, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_2 = (__pyx_t_1) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_1) : __Pyx_PyObject_CallNoArg(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 262, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!(likely(PyDict_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_2)->tp_name), 0))) __PYX_ERR(0, 262, __pyx_L1_error) + __pyx_v_cache_file_type = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":265 + * # Note: this cache key must be the same from PyDB.get_file_type() -- see it for comments + * # on the cache. + * cache_file_type_key = (frame_obj.f_code.co_firstlineno, abs_path_real_path_and_base[0], frame_obj.f_code) # <<<<<<<<<<<<<< + * try: + * file_type = cache_file_type[cache_file_type_key] # Make it faster + */ + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_frame_obj->f_code->co_firstlineno); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 265, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_abs_path_real_path_and_base, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 265, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 265, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_4); + __Pyx_INCREF(((PyObject *)__pyx_v_frame_obj->f_code)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_frame_obj->f_code)); + PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_frame_obj->f_code)); + __pyx_t_2 = 0; + __pyx_t_4 = 0; + __pyx_v_cache_file_type_key = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":266 + * # on the cache. + * cache_file_type_key = (frame_obj.f_code.co_firstlineno, abs_path_real_path_and_base[0], frame_obj.f_code) + * try: # <<<<<<<<<<<<<< + * file_type = cache_file_type[cache_file_type_key] # Make it faster + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_8, &__pyx_t_7, &__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_6); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":267 + * cache_file_type_key = (frame_obj.f_code.co_firstlineno, abs_path_real_path_and_base[0], frame_obj.f_code) + * try: + * file_type = cache_file_type[cache_file_type_key] # Make it faster # <<<<<<<<<<<<<< + * except: + * file_type = main_debugger.get_file_type(frame_obj, abs_path_real_path_and_base) # we don't want to debug anything related to pydevd + */ + if (unlikely(__pyx_v_cache_file_type == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 267, __pyx_L15_error) + } + __pyx_t_1 = __Pyx_PyDict_GetItem(__pyx_v_cache_file_type, __pyx_v_cache_file_type_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 267, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_file_type = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":266 + * # on the cache. 
+ * cache_file_type_key = (frame_obj.f_code.co_firstlineno, abs_path_real_path_and_base[0], frame_obj.f_code) + * try: # <<<<<<<<<<<<<< + * file_type = cache_file_type[cache_file_type_key] # Make it faster + * except: + */ + } + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L20_try_end; + __pyx_L15_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":268 + * try: + * file_type = cache_file_type[cache_file_type_key] # Make it faster + * except: # <<<<<<<<<<<<<< + * file_type = main_debugger.get_file_type(frame_obj, abs_path_real_path_and_base) # we don't want to debug anything related to pydevd + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_func_code_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_4, &__pyx_t_2) < 0) __PYX_ERR(0, 268, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_2); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":269 + * file_type = cache_file_type[cache_file_type_key] # Make it faster + * except: + * file_type = main_debugger.get_file_type(frame_obj, abs_path_real_path_and_base) # we don't want to debug anything related to pydevd # <<<<<<<<<<<<<< + * + * if file_type is not None: + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_get_file_type); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 269, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[3] = {__pyx_t_11, ((PyObject *)__pyx_v_frame_obj), __pyx_v_abs_path_real_path_and_base}; + __pyx_t_9 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 269, __pyx_L17_except_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[3] = {__pyx_t_11, ((PyObject *)__pyx_v_frame_obj), __pyx_v_abs_path_real_path_and_base}; + __pyx_t_9 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 269, __pyx_L17_except_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + { + __pyx_t_12 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 269, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_12); + if (__pyx_t_11) { + __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_11); __pyx_t_11 = NULL; + } + __Pyx_INCREF(((PyObject *)__pyx_v_frame_obj)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_frame_obj)); + PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_5, ((PyObject *)__pyx_v_frame_obj)); + __Pyx_INCREF(__pyx_v_abs_path_real_path_and_base); + 
__Pyx_GIVEREF(__pyx_v_abs_path_real_path_and_base); + PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_5, __pyx_v_abs_path_real_path_and_base); + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_12, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 269, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_file_type, __pyx_t_9); + __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L16_exception_handled; + } + __pyx_L17_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":266 + * # on the cache. + * cache_file_type_key = (frame_obj.f_code.co_firstlineno, abs_path_real_path_and_base[0], frame_obj.f_code) + * try: # <<<<<<<<<<<<<< + * file_type = cache_file_type[cache_file_type_key] # Make it faster + * except: + */ + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_7, __pyx_t_6); + goto __pyx_L1_error; + __pyx_L16_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_7, __pyx_t_6); + __pyx_L20_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":271 + * file_type = main_debugger.get_file_type(frame_obj, abs_path_real_path_and_base) # we don't want to debug anything related to pydevd + * + * if file_type is not None: # <<<<<<<<<<<<<< + * func_code_info.always_skip_code = True + * + */ + __pyx_t_3 = (__pyx_v_file_type != Py_None); + __pyx_t_13 = (__pyx_t_3 != 0); + if (__pyx_t_13) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":272 + * + * if file_type is not None: + * func_code_info.always_skip_code = True # <<<<<<<<<<<<<< + * + * if not func_code_info.always_skip_code: + */ + __pyx_v_func_code_info->always_skip_code = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":271 + * file_type = main_debugger.get_file_type(frame_obj, abs_path_real_path_and_base) # we don't want to debug anything related to pydevd + * + * if file_type is not None: # <<<<<<<<<<<<<< + * func_code_info.always_skip_code = True + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":254 + * func_code_info.co_name = co_name + * + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":274 + * func_code_info.always_skip_code = True + * + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * if main_debugger is not None: + * + */ + __pyx_t_13 = ((!(__pyx_v_func_code_info->always_skip_code != 0)) != 0); + if (__pyx_t_13) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":275 + * + * if not func_code_info.always_skip_code: + * if main_debugger is not None: # <<<<<<<<<<<<<< + * + * breakpoints: dict = main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename) + */ + __pyx_t_13 = (__pyx_v_main_debugger != Py_None); + __pyx_t_3 = (__pyx_t_13 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":277 + * if main_debugger is not None: + * + * breakpoints: dict = main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename) # <<<<<<<<<<<<<< + * function_breakpoint: object = main_debugger.function_breakpoint_name_to_breakpoint.get(func_code_info.co_name) + * # print('\n---') 
+ */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_breakpoints); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 277, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 277, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_4, __pyx_v_func_code_info->canonical_normalized_filename) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_func_code_info->canonical_normalized_filename); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 277, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(PyDict_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_2)->tp_name), 0))) __PYX_ERR(0, 277, __pyx_L1_error) + __pyx_v_breakpoints = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":278 + * + * breakpoints: dict = main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename) + * function_breakpoint: object = main_debugger.function_breakpoint_name_to_breakpoint.get(func_code_info.co_name) # <<<<<<<<<<<<<< + * # print('\n---') + * # print(main_debugger.breakpoints) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_function_breakpoint_name_to_brea); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 278, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_get); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 278, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_2 = (__pyx_t_1) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_1, __pyx_v_func_code_info->co_name) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_func_code_info->co_name); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 278, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_function_breakpoint = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":283 + * # print(func_code_info.canonical_normalized_filename) + * # print(main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename)) + * code_obj_py: object = code_obj # <<<<<<<<<<<<<< + * cached_code_obj_info: object = _cache.get(code_obj_py) + * if cached_code_obj_info: + */ + __pyx_t_2 = ((PyObject *)__pyx_v_code_obj); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_code_obj_py = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":284 + * # print(main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename)) + * code_obj_py: object = code_obj + * cached_code_obj_info: object = _cache.get(code_obj_py) # <<<<<<<<<<<<<< + * if cached_code_obj_info: + * # The cache is for new code objects, so, in this case it's already + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_cache); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 284, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 284, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_4, __pyx_v_code_obj_py) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_code_obj_py); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 284, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_cached_code_obj_info = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":285 + * code_obj_py: object = code_obj + * cached_code_obj_info: object = _cache.get(code_obj_py) + * if cached_code_obj_info: # <<<<<<<<<<<<<< + * # The cache is for new code objects, so, in this case it's already + * # using the new code and we can't change it as this is a generator! + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_cached_code_obj_info); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 285, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":291 + * # we may not want to go into tracing mode (as would usually happen + * # when the new_code is None). 
+ * func_code_info.new_code = None # <<<<<<<<<<<<<< + * breakpoint_found, thread_info.force_stay_in_untraced_mode = \ + * cached_code_obj_info.compute_force_stay_in_untraced_mode(breakpoints) + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_func_code_info->new_code); + __Pyx_DECREF(__pyx_v_func_code_info->new_code); + __pyx_v_func_code_info->new_code = Py_None; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":293 + * func_code_info.new_code = None + * breakpoint_found, thread_info.force_stay_in_untraced_mode = \ + * cached_code_obj_info.compute_force_stay_in_untraced_mode(breakpoints) # <<<<<<<<<<<<<< + * func_code_info.breakpoint_found = breakpoint_found + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_cached_code_obj_info, __pyx_n_s_compute_force_stay_in_untraced_m); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 293, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_4, __pyx_v_breakpoints) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_breakpoints); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 293, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_2))) || (PyList_CheckExact(__pyx_t_2))) { + PyObject* sequence = __pyx_t_2; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 292, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_4 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_9 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_14 = Py_TYPE(__pyx_t_9)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_14(__pyx_t_9); if (unlikely(!__pyx_t_1)) goto __pyx_L27_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_4 = __pyx_t_14(__pyx_t_9); if (unlikely(!__pyx_t_4)) goto __pyx_L27_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_14(__pyx_t_9), 2) < 0) __PYX_ERR(0, 292, __pyx_L1_error) + __pyx_t_14 = NULL; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L28_unpacking_done; + __pyx_L27_unpacking_failed:; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_14 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 292, __pyx_L1_error) + __pyx_L28_unpacking_done:; + } + + /* 
"_pydevd_frame_eval/pydevd_frame_evaluator.pyx":292 + * # when the new_code is None). + * func_code_info.new_code = None + * breakpoint_found, thread_info.force_stay_in_untraced_mode = \ # <<<<<<<<<<<<<< + * cached_code_obj_info.compute_force_stay_in_untraced_mode(breakpoints) + * func_code_info.breakpoint_found = breakpoint_found + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_breakpoint_found = __pyx_t_1; + __pyx_t_1 = 0; + __pyx_v_thread_info->force_stay_in_untraced_mode = __pyx_t_3; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":294 + * breakpoint_found, thread_info.force_stay_in_untraced_mode = \ + * cached_code_obj_info.compute_force_stay_in_untraced_mode(breakpoints) + * func_code_info.breakpoint_found = breakpoint_found # <<<<<<<<<<<<<< + * + * elif function_breakpoint: + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint_found); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 294, __pyx_L1_error) + __pyx_v_func_code_info->breakpoint_found = __pyx_t_3; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":285 + * code_obj_py: object = code_obj + * cached_code_obj_info: object = _cache.get(code_obj_py) + * if cached_code_obj_info: # <<<<<<<<<<<<<< + * # The cache is for new code objects, so, in this case it's already + * # using the new code and we can't change it as this is a generator! + */ + goto __pyx_L26; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":296 + * func_code_info.breakpoint_found = breakpoint_found + * + * elif function_breakpoint: # <<<<<<<<<<<<<< + * # Go directly into tracing mode + * func_code_info.breakpoint_found = True + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_function_breakpoint); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 296, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":298 + * elif function_breakpoint: + * # Go directly into tracing mode + * func_code_info.breakpoint_found = True # <<<<<<<<<<<<<< + * func_code_info.new_code = None + * + */ + __pyx_v_func_code_info->breakpoint_found = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":299 + * # Go directly into tracing mode + * func_code_info.breakpoint_found = True + * func_code_info.new_code = None # <<<<<<<<<<<<<< + * + * elif breakpoints: + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_func_code_info->new_code); + __Pyx_DECREF(__pyx_v_func_code_info->new_code); + __pyx_v_func_code_info->new_code = Py_None; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":296 + * func_code_info.breakpoint_found = breakpoint_found + * + * elif function_breakpoint: # <<<<<<<<<<<<<< + * # Go directly into tracing mode + * func_code_info.breakpoint_found = True + */ + goto __pyx_L26; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":301 + * func_code_info.new_code = None + * + * elif breakpoints: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('found breakpoints', code_obj_py.co_name, breakpoints) + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoints); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 301, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":307 + * # Note: new_code can be None if unable to generate. + * # It should automatically put the new code object in the cache. 
+ * breakpoint_found, func_code_info.new_code = generate_code_with_breakpoints(code_obj_py, breakpoints) # <<<<<<<<<<<<<< + * func_code_info.breakpoint_found = breakpoint_found + * + */ + __pyx_t_2 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_generate_code_with_breakpoints(__pyx_v_code_obj_py, __pyx_v_breakpoints); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if ((likely(PyTuple_CheckExact(__pyx_t_2))) || (PyList_CheckExact(__pyx_t_2))) { + PyObject* sequence = __pyx_t_2; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 307, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_1 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_9 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_14 = Py_TYPE(__pyx_t_9)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_14(__pyx_t_9); if (unlikely(!__pyx_t_4)) goto __pyx_L29_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_1 = __pyx_t_14(__pyx_t_9); if (unlikely(!__pyx_t_1)) goto __pyx_L29_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_14(__pyx_t_9), 2) < 0) __PYX_ERR(0, 307, __pyx_L1_error) + __pyx_t_14 = NULL; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L30_unpacking_done; + __pyx_L29_unpacking_failed:; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_14 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 307, __pyx_L1_error) + __pyx_L30_unpacking_done:; + } + __pyx_v_breakpoint_found = __pyx_t_4; + __pyx_t_4 = 0; + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_func_code_info->new_code); + __Pyx_DECREF(__pyx_v_func_code_info->new_code); + __pyx_v_func_code_info->new_code = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":308 + * # It should automatically put the new code object in the cache. 
+ * breakpoint_found, func_code_info.new_code = generate_code_with_breakpoints(code_obj_py, breakpoints) + * func_code_info.breakpoint_found = breakpoint_found # <<<<<<<<<<<<<< + * + * Py_INCREF(func_code_info) + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint_found); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 308, __pyx_L1_error) + __pyx_v_func_code_info->breakpoint_found = __pyx_t_3; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":301 + * func_code_info.new_code = None + * + * elif breakpoints: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('found breakpoints', code_obj_py.co_name, breakpoints) + */ + } + __pyx_L26:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":275 + * + * if not func_code_info.always_skip_code: + * if main_debugger is not None: # <<<<<<<<<<<<<< + * + * breakpoints: dict = main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":274 + * func_code_info.always_skip_code = True + * + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * if main_debugger is not None: + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":310 + * func_code_info.breakpoint_found = breakpoint_found + * + * Py_INCREF(func_code_info) # <<<<<<<<<<<<<< + * _PyCode_SetExtra( code_obj, _code_extra_index, func_code_info) + * + */ + Py_INCREF(((PyObject *)__pyx_v_func_code_info)); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":311 + * + * Py_INCREF(func_code_info) + * _PyCode_SetExtra( code_obj, _code_extra_index, func_code_info) # <<<<<<<<<<<<<< + * + * return func_code_info + */ + (void)(_PyCode_SetExtra(((PyObject *)__pyx_v_code_obj), __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator__code_extra_index, ((PyObject *)__pyx_v_func_code_info))); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":313 + * _PyCode_SetExtra( code_obj, _code_extra_index, func_code_info) + * + * return func_code_info # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(((PyObject *)__pyx_r)); + __Pyx_INCREF(((PyObject *)__pyx_v_func_code_info)); + __pyx_r = __pyx_v_func_code_info; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":212 + * cdef int _code_extra_index = -1 + * + * cdef FuncCodeInfo get_func_code_info(ThreadInfo thread_info, PyFrameObject * frame_obj, PyCodeObject * code_obj): # <<<<<<<<<<<<<< + * ''' + * Provides code-object related info. 
+ */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_func_code_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF((PyObject *)__pyx_v_func_code_info_obj); + __Pyx_XDECREF(__pyx_v_co_filename); + __Pyx_XDECREF(__pyx_v_co_name); + __Pyx_XDECREF(__pyx_v_cache_file_type); + __Pyx_XDECREF(__pyx_v_cache_file_type_key); + __Pyx_XDECREF((PyObject *)__pyx_v_func_code_info); + __Pyx_XDECREF(__pyx_v_abs_path_real_path_and_base); + __Pyx_XDECREF(__pyx_v_file_type); + __Pyx_XDECREF(__pyx_v_breakpoints); + __Pyx_XDECREF(__pyx_v_function_breakpoint); + __Pyx_XDECREF(__pyx_v_code_obj_py); + __Pyx_XDECREF(__pyx_v_cached_code_obj_info); + __Pyx_XDECREF(__pyx_v_breakpoint_found); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":322 + * cdef public int last_line + * + * def __init__(self, dict line_to_offset, int first_line, int last_line): # <<<<<<<<<<<<<< + * self.line_to_offset = line_to_offset + * self.first_line = first_line + */ + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_line_to_offset = 0; + int __pyx_v_first_line; + int __pyx_v_last_line; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_line_to_offset,&__pyx_n_s_first_line,&__pyx_n_s_last_line,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_line_to_offset)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_first_line)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, 1); __PYX_ERR(0, 322, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_last_line)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, 2); __PYX_ERR(0, 322, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 322, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else 
{ + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_line_to_offset = ((PyObject*)values[0]); + __pyx_v_first_line = __Pyx_PyInt_As_int(values[1]); if (unlikely((__pyx_v_first_line == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 322, __pyx_L3_error) + __pyx_v_last_line = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_last_line == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 322, __pyx_L3_error) + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 322, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_line_to_offset), (&PyDict_Type), 1, "line_to_offset", 1))) __PYX_ERR(0, 322, __pyx_L1_error) + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo___init__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self), __pyx_v_line_to_offset, __pyx_v_first_line, __pyx_v_last_line); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo___init__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_line_to_offset, int __pyx_v_first_line, int __pyx_v_last_line) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":323 + * + * def __init__(self, dict line_to_offset, int first_line, int last_line): + * self.line_to_offset = line_to_offset # <<<<<<<<<<<<<< + * self.first_line = first_line + * self.last_line = last_line + */ + __Pyx_INCREF(__pyx_v_line_to_offset); + __Pyx_GIVEREF(__pyx_v_line_to_offset); + __Pyx_GOTREF(__pyx_v_self->line_to_offset); + __Pyx_DECREF(__pyx_v_self->line_to_offset); + __pyx_v_self->line_to_offset = __pyx_v_line_to_offset; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":324 + * def __init__(self, dict line_to_offset, int first_line, int last_line): + * self.line_to_offset = line_to_offset + * self.first_line = first_line # <<<<<<<<<<<<<< + * self.last_line = last_line + * + */ + __pyx_v_self->first_line = __pyx_v_first_line; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":325 + * self.line_to_offset = line_to_offset + * self.first_line = first_line + * self.last_line = last_line # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_self->last_line = __pyx_v_last_line; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":322 + * cdef public int last_line + * + * def __init__(self, dict line_to_offset, int first_line, int last_line): # <<<<<<<<<<<<<< + * self.line_to_offset = line_to_offset + * self.first_line = first_line + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":318 + * cdef class _CodeLineInfo: + * + * cdef public dict line_to_offset # <<<<<<<<<<<<<< + * cdef public int first_line + * cdef public int last_line + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->line_to_offset); + __pyx_r = __pyx_v_self->line_to_offset; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyDict_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 318, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->line_to_offset); + __Pyx_DECREF(__pyx_v_self->line_to_offset); + __pyx_v_self->line_to_offset = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.line_to_offset.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_5__del__(PyObject *__pyx_v_self) { + 
int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->line_to_offset); + __Pyx_DECREF(__pyx_v_self->line_to_offset); + __pyx_v_self->line_to_offset = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":319 + * + * cdef public dict line_to_offset + * cdef public int first_line # <<<<<<<<<<<<<< + * cdef public int last_line + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->first_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 319, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.first_line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 319, __pyx_L1_error) + __pyx_v_self->first_line = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.first_line.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":320 + * cdef public dict line_to_offset + * cdef public int first_line + * cdef public int last_line # <<<<<<<<<<<<<< + * + * def __init__(self, dict line_to_offset, int first_line, int last_line): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->last_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 320, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.last_line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self), ((PyObject 
*)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 320, __pyx_L1_error) + __pyx_v_self->last_line = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.last_line.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_2__reduce_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_2__reduce_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.first_line, self.last_line, self.line_to_offset) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->first_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->last_line); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_self->line_to_offset); + __Pyx_GIVEREF(__pyx_v_self->line_to_offset); + PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_v_self->line_to_offset); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* 
"(tree fragment)":6 + * cdef bint use_setstate + * state = (self.first_line, self.last_line, self.line_to_offset) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_3 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v__dict = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":7 + * state = (self.first_line, self.last_line, self.line_to_offset) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_4 = (__pyx_v__dict != Py_None); + __pyx_t_5 = (__pyx_t_4 != 0); + if (__pyx_t_5) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v__dict); + __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_2)); + __pyx_t_2 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.line_to_offset is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.first_line, self.last_line, self.line_to_offset) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.line_to_offset is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, None), state + */ + /*else*/ { + __pyx_t_5 = (__pyx_v_self->line_to_offset != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_5; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.line_to_offset is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, None), state + * else: + */ + __pyx_t_5 = (__pyx_v_use_setstate != 0); + if (__pyx_t_5) { + + /* "(tree fragment)":13 + * use_setstate = self.line_to_offset is not None + * if use_setstate: + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_pyx_unpickle__CodeLineInfo); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_66829570); + __Pyx_GIVEREF(__pyx_int_66829570); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_int_66829570); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + 
PyTuple_SET_ITEM(__pyx_t_3, 2, Py_None); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_2 = 0; + __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.line_to_offset is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, None), state + * else: + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle__CodeLineInfo__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle__CodeLineInfo); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_66829570); + __Pyx_GIVEREF(__pyx_int_66829570); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_int_66829570); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_v_state); + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_3); + __pyx_t_1 = 0; + __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle__CodeLineInfo__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_4__setstate_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_4__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle__CodeLineInfo__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CodeLineInfo__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle__CodeLineInfo, (type(self), 0x3fbbd02, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle__CodeLineInfo__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":329 + * + * # Note: this method has a version in pure-python too. 
+ * def _get_code_line_info(code_obj): # <<<<<<<<<<<<<< + * line_to_offset: dict = {} + * first_line: int = None + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_get_code_line_info(PyObject *__pyx_self, PyObject *__pyx_v_code_obj); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_get_code_line_info = {"_get_code_line_info", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_get_code_line_info, METH_O, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_get_code_line_info(PyObject *__pyx_self, PyObject *__pyx_v_code_obj) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_get_code_line_info (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10_get_code_line_info(__pyx_self, ((PyObject *)__pyx_v_code_obj)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_10_get_code_line_info(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_code_obj) { + PyObject *__pyx_v_line_to_offset = 0; + PyObject *__pyx_v_first_line = 0; + PyObject *__pyx_v_last_line = 0; + int __pyx_v_offset; + int __pyx_v_line; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + PyObject *(*__pyx_t_5)(PyObject *); + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *(*__pyx_t_8)(PyObject *); + int __pyx_t_9; + int __pyx_t_10; + int __pyx_t_11; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_get_code_line_info", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":330 + * # Note: this method has a version in pure-python too. 
+ * def _get_code_line_info(code_obj): + * line_to_offset: dict = {} # <<<<<<<<<<<<<< + * first_line: int = None + * last_line: int = None + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_line_to_offset = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":331 + * def _get_code_line_info(code_obj): + * line_to_offset: dict = {} + * first_line: int = None # <<<<<<<<<<<<<< + * last_line: int = None + * + */ + __Pyx_INCREF(Py_None); + __pyx_v_first_line = Py_None; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":332 + * line_to_offset: dict = {} + * first_line: int = None + * last_line: int = None # <<<<<<<<<<<<<< + * + * cdef int offset + */ + __Pyx_INCREF(Py_None); + __pyx_v_last_line = Py_None; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":337 + * cdef int line + * + * for offset, line in dis.findlinestarts(code_obj): # <<<<<<<<<<<<<< + * line_to_offset[line] = offset + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_dis); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_findlinestarts); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_2) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_2, __pyx_v_code_obj) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_code_obj); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) { + __pyx_t_3 = __pyx_t_1; __Pyx_INCREF(__pyx_t_3); __pyx_t_4 = 0; + __pyx_t_5 = NULL; + } else { + __pyx_t_4 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 337, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + for (;;) { + if (likely(!__pyx_t_5)) { + if (likely(PyList_CheckExact(__pyx_t_3))) { + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 337, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } else { + if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 337, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } + } else { + __pyx_t_1 = __pyx_t_5(__pyx_t_3); + if 
(unlikely(!__pyx_t_1)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 337, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_1); + } + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 337, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_2 = PyList_GET_ITEM(sequence, 0); + __pyx_t_6 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_6); + #else + __pyx_t_2 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_7 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext; + index = 0; __pyx_t_2 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_2)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_2); + index = 1; __pyx_t_6 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_7), 2) < 0) __PYX_ERR(0, 337, __pyx_L1_error) + __pyx_t_8 = NULL; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L6_unpacking_done; + __pyx_L5_unpacking_failed:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_8 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 337, __pyx_L1_error) + __pyx_L6_unpacking_done:; + } + __pyx_t_9 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_10 = __Pyx_PyInt_As_int(__pyx_t_6); if (unlikely((__pyx_t_10 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 337, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_offset = __pyx_t_9; + __pyx_v_line = __pyx_t_10; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":338 + * + * for offset, line in dis.findlinestarts(code_obj): + * line_to_offset[line] = offset # <<<<<<<<<<<<<< + * + * if line_to_offset: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (unlikely(PyDict_SetItem(__pyx_v_line_to_offset, __pyx_t_6, __pyx_t_1) < 0)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":337 + * cdef int line + * + * for offset, line in dis.findlinestarts(code_obj): # <<<<<<<<<<<<<< + * line_to_offset[line] = offset + * + */ + } + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":340 + * line_to_offset[line] = offset + * + * if line_to_offset: # <<<<<<<<<<<<<< + * first_line = min(line_to_offset) + * last_line = max(line_to_offset) + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_line_to_offset); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 340, __pyx_L1_error) + if (__pyx_t_11) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":341 + * + * if line_to_offset: + * first_line = min(line_to_offset) # <<<<<<<<<<<<<< + * last_line = max(line_to_offset) + * return _CodeLineInfo(line_to_offset, first_line, last_line) + */ + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_min, __pyx_v_line_to_offset); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 341, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_first_line, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":342 + * if line_to_offset: + * first_line = min(line_to_offset) + * last_line = max(line_to_offset) # <<<<<<<<<<<<<< + * return _CodeLineInfo(line_to_offset, first_line, last_line) + * + */ + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_max, __pyx_v_line_to_offset); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 342, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_last_line, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":340 + * line_to_offset[line] = offset + * + * if line_to_offset: # <<<<<<<<<<<<<< + * first_line = min(line_to_offset) + * last_line = max(line_to_offset) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":343 + * first_line = min(line_to_offset) + * last_line = max(line_to_offset) + * return _CodeLineInfo(line_to_offset, first_line, last_line) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 343, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_line_to_offset); + __Pyx_GIVEREF(__pyx_v_line_to_offset); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_line_to_offset); + __Pyx_INCREF(__pyx_v_first_line); + __Pyx_GIVEREF(__pyx_v_first_line); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_first_line); + __Pyx_INCREF(__pyx_v_last_line); + __Pyx_GIVEREF(__pyx_v_last_line); + PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_v_last_line); + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo), __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 343, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":329 + * + * # Note: this method has a version in pure-python too. 
+ * def _get_code_line_info(code_obj): # <<<<<<<<<<<<<< + * line_to_offset: dict = {} + * first_line: int = None + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._get_code_line_info", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_line_to_offset); + __Pyx_XDECREF(__pyx_v_first_line); + __Pyx_XDECREF(__pyx_v_last_line); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":353 + * _cache: dict = {} + * + * def get_cached_code_obj_info_py(code_obj_py): # <<<<<<<<<<<<<< + * ''' + * :return _CacheValue: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13get_cached_code_obj_info_py(PyObject *__pyx_self, PyObject *__pyx_v_code_obj_py); /*proto*/ +static char __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_12get_cached_code_obj_info_py[] = "\n :return _CacheValue:\n :note: on cython use _cache.get(code_obj_py) directly.\n "; +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_13get_cached_code_obj_info_py = {"get_cached_code_obj_info_py", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13get_cached_code_obj_info_py, METH_O, __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_12get_cached_code_obj_info_py}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13get_cached_code_obj_info_py(PyObject *__pyx_self, PyObject *__pyx_v_code_obj_py) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_cached_code_obj_info_py (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12get_cached_code_obj_info_py(__pyx_self, ((PyObject *)__pyx_v_code_obj_py)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_12get_cached_code_obj_info_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_code_obj_py) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_cached_code_obj_info_py", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":358 + * :note: on cython use _cache.get(code_obj_py) directly. + * ''' + * return _cache.get(code_obj_py) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_cache); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_get); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_2) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_2, __pyx_v_code_obj_py) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_code_obj_py); + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":353 + * _cache: dict = {} + * + * def get_cached_code_obj_info_py(code_obj_py): # <<<<<<<<<<<<<< + * ''' + * :return _CacheValue: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_cached_code_obj_info_py", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":368 + * cdef public set code_lines_as_set + * + * def __init__(self, object code_obj_py, _CodeLineInfo code_line_info, set breakpoints_hit_at_lines): # <<<<<<<<<<<<<< + * ''' + * :param code_obj_py: + */ + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__[] = "\n :param code_obj_py:\n :param _CodeLineInfo code_line_info:\n :param set[int] breakpoints_hit_at_lines:\n "; +#if CYTHON_UPDATE_DESCRIPTOR_DOC +struct wrapperbase __pyx_wrapperbase_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__; +#endif +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_code_obj_py = 0; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_code_line_info = 0; + PyObject *__pyx_v_breakpoints_hit_at_lines = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_code_obj_py,&__pyx_n_s_code_line_info,&__pyx_n_s_breakpoints_hit_at_lines,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_code_obj_py)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_code_line_info)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, 1); __PYX_ERR(0, 368, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_breakpoints_hit_at_lines)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, 2); __PYX_ERR(0, 
368, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 368, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_code_obj_py = values[0]; + __pyx_v_code_line_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)values[1]); + __pyx_v_breakpoints_hit_at_lines = ((PyObject*)values[2]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 368, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_code_line_info), __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, 1, "code_line_info", 0))) __PYX_ERR(0, 368, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_breakpoints_hit_at_lines), (&PySet_Type), 1, "breakpoints_hit_at_lines", 1))) __PYX_ERR(0, 368, __pyx_L1_error) + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), __pyx_v_code_obj_py, __pyx_v_code_line_info, __pyx_v_breakpoints_hit_at_lines); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_code_obj_py, struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v_code_line_info, PyObject *__pyx_v_breakpoints_hit_at_lines) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":374 + * :param set[int] breakpoints_hit_at_lines: + * ''' + * self.code_obj_py = code_obj_py # <<<<<<<<<<<<<< + * self.code_line_info = code_line_info + * self.breakpoints_hit_at_lines = breakpoints_hit_at_lines + */ + __Pyx_INCREF(__pyx_v_code_obj_py); + __Pyx_GIVEREF(__pyx_v_code_obj_py); + __Pyx_GOTREF(__pyx_v_self->code_obj_py); + __Pyx_DECREF(__pyx_v_self->code_obj_py); + __pyx_v_self->code_obj_py = __pyx_v_code_obj_py; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":375 + * ''' + * self.code_obj_py = code_obj_py + * self.code_line_info = code_line_info # <<<<<<<<<<<<<< + * self.breakpoints_hit_at_lines = breakpoints_hit_at_lines + * self.code_lines_as_set = set(code_line_info.line_to_offset) + */ + __Pyx_INCREF(((PyObject *)__pyx_v_code_line_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_code_line_info)); + __Pyx_GOTREF(__pyx_v_self->code_line_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->code_line_info)); + __pyx_v_self->code_line_info = __pyx_v_code_line_info; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":376 + * 
self.code_obj_py = code_obj_py + * self.code_line_info = code_line_info + * self.breakpoints_hit_at_lines = breakpoints_hit_at_lines # <<<<<<<<<<<<<< + * self.code_lines_as_set = set(code_line_info.line_to_offset) + * + */ + __Pyx_INCREF(__pyx_v_breakpoints_hit_at_lines); + __Pyx_GIVEREF(__pyx_v_breakpoints_hit_at_lines); + __Pyx_GOTREF(__pyx_v_self->breakpoints_hit_at_lines); + __Pyx_DECREF(__pyx_v_self->breakpoints_hit_at_lines); + __pyx_v_self->breakpoints_hit_at_lines = __pyx_v_breakpoints_hit_at_lines; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":377 + * self.code_line_info = code_line_info + * self.breakpoints_hit_at_lines = breakpoints_hit_at_lines + * self.code_lines_as_set = set(code_line_info.line_to_offset) # <<<<<<<<<<<<<< + * + * cpdef compute_force_stay_in_untraced_mode(self, breakpoints): + */ + __pyx_t_1 = PySet_New(__pyx_v_code_line_info->line_to_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 377, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->code_lines_as_set); + __Pyx_DECREF(__pyx_v_self->code_lines_as_set); + __pyx_v_self->code_lines_as_set = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":368 + * cdef public set code_lines_as_set + * + * def __init__(self, object code_obj_py, _CodeLineInfo code_line_info, set breakpoints_hit_at_lines): # <<<<<<<<<<<<<< + * ''' + * :param code_obj_py: + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":379 + * self.code_lines_as_set = set(code_line_info.line_to_offset) + * + * cpdef compute_force_stay_in_untraced_mode(self, breakpoints): # <<<<<<<<<<<<<< + * ''' + * :param breakpoints: + */ + +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_3compute_force_stay_in_untraced_mode(PyObject *__pyx_v_self, PyObject *__pyx_v_breakpoints); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_compute_force_stay_in_untraced_mode(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_breakpoints, int __pyx_skip_dispatch) { + int __pyx_v_force_stay_in_untraced_mode; + int __pyx_v_breakpoint_found; + PyObject *__pyx_v_target_breakpoints = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("compute_force_stay_in_untraced_mode", 0); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, 
__pyx_obj_dict_version))) { + PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_compute_force_stay_in_untraced_m); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 379, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_3compute_force_stay_in_untraced_mode)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_breakpoints) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_breakpoints); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 379, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":389 + * cdef set target_breakpoints + * + * force_stay_in_untraced_mode = False # <<<<<<<<<<<<<< + * + * target_breakpoints = self.code_lines_as_set.intersection(breakpoints) + */ + __pyx_v_force_stay_in_untraced_mode = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":391 + * force_stay_in_untraced_mode = False + * + * target_breakpoints = self.code_lines_as_set.intersection(breakpoints) # <<<<<<<<<<<<<< + * breakpoint_found = bool(target_breakpoints) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->code_lines_as_set, __pyx_n_s_intersection); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 391, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_breakpoints) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_breakpoints); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 391, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!(likely(PySet_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 391, __pyx_L1_error) + __pyx_v_target_breakpoints = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":392 + * + * target_breakpoints = self.code_lines_as_set.intersection(breakpoints) + * breakpoint_found = bool(target_breakpoints) # <<<<<<<<<<<<<< + * + * if not breakpoint_found: + */ + __pyx_t_5 = (__pyx_v_target_breakpoints != Py_None)&&(PySet_GET_SIZE(__pyx_v_target_breakpoints) != 0); + __pyx_v_breakpoint_found = (!(!__pyx_t_5)); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":394 + * breakpoint_found = bool(target_breakpoints) + * + * if not breakpoint_found: # <<<<<<<<<<<<<< + * force_stay_in_untraced_mode = True + * else: + */ + __pyx_t_5 = ((!(__pyx_v_breakpoint_found != 0)) != 0); + if (__pyx_t_5) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":395 + * + * if not breakpoint_found: + * force_stay_in_untraced_mode = True # <<<<<<<<<<<<<< + * else: + * force_stay_in_untraced_mode = self.breakpoints_hit_at_lines.issuperset(set(breakpoints)) + */ + __pyx_v_force_stay_in_untraced_mode = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":394 + * breakpoint_found = bool(target_breakpoints) + * + * if not breakpoint_found: # <<<<<<<<<<<<<< + * force_stay_in_untraced_mode = True + * else: + */ + goto __pyx_L3; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":397 + * force_stay_in_untraced_mode = True + * else: + * force_stay_in_untraced_mode = self.breakpoints_hit_at_lines.issuperset(set(breakpoints)) # <<<<<<<<<<<<<< + * + * return breakpoint_found, force_stay_in_untraced_mode + */ + /*else*/ { + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->breakpoints_hit_at_lines, __pyx_n_s_issuperset); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 397, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PySet_New(__pyx_v_breakpoints); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 397, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 397, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 397, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_force_stay_in_untraced_mode = __pyx_t_5; + } + __pyx_L3:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":399 + * force_stay_in_untraced_mode = self.breakpoints_hit_at_lines.issuperset(set(breakpoints)) + * + * return breakpoint_found, force_stay_in_untraced_mode # <<<<<<<<<<<<<< + * + * def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_breakpoint_found); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 399, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_force_stay_in_untraced_mode); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 399, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 399, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":379 + * self.code_lines_as_set = set(code_line_info.line_to_offset) + * + * cpdef compute_force_stay_in_untraced_mode(self, breakpoints): # <<<<<<<<<<<<<< + * ''' + * :param breakpoints: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.compute_force_stay_in_untraced_mode", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_target_breakpoints); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_3compute_force_stay_in_untraced_mode(PyObject *__pyx_v_self, PyObject *__pyx_v_breakpoints); /*proto*/ +static char __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_2compute_force_stay_in_untraced_mode[] = "\n :param breakpoints:\n set(breakpoint_lines) or dict(breakpoint_line->breakpoint info)\n :return tuple(breakpoint_found, force_stay_in_untraced_mode)\n "; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_3compute_force_stay_in_untraced_mode(PyObject *__pyx_v_self, PyObject *__pyx_v_breakpoints) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("compute_force_stay_in_untraced_mode (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_2compute_force_stay_in_untraced_mode(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), ((PyObject *)__pyx_v_breakpoints)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_2compute_force_stay_in_untraced_mode(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_breakpoints) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("compute_force_stay_in_untraced_mode", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_compute_force_stay_in_untraced_mode(__pyx_v_self, __pyx_v_breakpoints, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 379, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.compute_force_stay_in_untraced_mode", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":363 + * cdef class _CacheValue(object): + * + * cdef public object code_obj_py # <<<<<<<<<<<<<< + * cdef public _CodeLineInfo code_line_info + * cdef public set breakpoints_hit_at_lines + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->code_obj_py); + __pyx_r = __pyx_v_self->code_obj_py; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_2__set__(struct 
__pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->code_obj_py); + __Pyx_DECREF(__pyx_v_self->code_obj_py); + __pyx_v_self->code_obj_py = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->code_obj_py); + __Pyx_DECREF(__pyx_v_self->code_obj_py); + __pyx_v_self->code_obj_py = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":364 + * + * cdef public object code_obj_py + * cdef public _CodeLineInfo code_line_info # <<<<<<<<<<<<<< + * cdef public set breakpoints_hit_at_lines + * cdef public set code_lines_as_set + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_self->code_line_info)); + __pyx_r = ((PyObject *)__pyx_v_self->code_line_info); + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(((__pyx_v_value) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_value, __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo))))) __PYX_ERR(0, 364, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->code_line_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->code_line_info)); + __pyx_v_self->code_line_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.code_line_info.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->code_line_info); + __Pyx_DECREF(((PyObject *)__pyx_v_self->code_line_info)); + __pyx_v_self->code_line_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":365 + * cdef public object code_obj_py + * cdef public _CodeLineInfo code_line_info + * cdef public set breakpoints_hit_at_lines # <<<<<<<<<<<<<< + * cdef public set code_lines_as_set + * + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->breakpoints_hit_at_lines); + __pyx_r = __pyx_v_self->breakpoints_hit_at_lines; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PySet_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 365, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->breakpoints_hit_at_lines); + __Pyx_DECREF(__pyx_v_self->breakpoints_hit_at_lines); + __pyx_v_self->breakpoints_hit_at_lines = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.breakpoints_hit_at_lines.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->breakpoints_hit_at_lines); + __Pyx_DECREF(__pyx_v_self->breakpoints_hit_at_lines); + __pyx_v_self->breakpoints_hit_at_lines = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":366 + * cdef public _CodeLineInfo code_line_info + * cdef public set breakpoints_hit_at_lines + * cdef public set code_lines_as_set # <<<<<<<<<<<<<< + * + * def __init__(self, object code_obj_py, _CodeLineInfo code_line_info, set breakpoints_hit_at_lines): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set___get__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set___get__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->code_lines_as_set); + __pyx_r = __pyx_v_self->code_lines_as_set; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_2__set__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_2__set__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PySet_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 366, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->code_lines_as_set); + __Pyx_DECREF(__pyx_v_self->code_lines_as_set); + __pyx_v_self->code_lines_as_set = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.code_lines_as_set.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_4__del__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_4__del__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->code_lines_as_set); + __Pyx_DECREF(__pyx_v_self->code_lines_as_set); + __pyx_v_self->code_lines_as_set = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_4__reduce_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_4__reduce_cython__(struct 
__pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.breakpoints_hit_at_lines, self.code_line_info, self.code_lines_as_set, self.code_obj_py) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->breakpoints_hit_at_lines); + __Pyx_GIVEREF(__pyx_v_self->breakpoints_hit_at_lines); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->breakpoints_hit_at_lines); + __Pyx_INCREF(((PyObject *)__pyx_v_self->code_line_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->code_line_info)); + PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_self->code_line_info)); + __Pyx_INCREF(__pyx_v_self->code_lines_as_set); + __Pyx_GIVEREF(__pyx_v_self->code_lines_as_set); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_self->code_lines_as_set); + __Pyx_INCREF(__pyx_v_self->code_obj_py); + __Pyx_GIVEREF(__pyx_v_self->code_obj_py); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_v_self->code_obj_py); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.breakpoints_hit_at_lines, self.code_line_info, self.code_lines_as_set, self.code_obj_py) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self.breakpoints_hit_at_lines, self.code_line_info, self.code_lines_as_set, self.code_obj_py) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.breakpoints_hit_at_lines is not None or self.code_line_info is not None or self.code_lines_as_set is not None or self.code_obj_py is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = 
(self.breakpoints_hit_at_lines, self.code_line_info, self.code_lines_as_set, self.code_obj_py) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.breakpoints_hit_at_lines is not None or self.code_line_info is not None or self.code_lines_as_set is not None or self.code_obj_py is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, None), state + */ + /*else*/ { + __pyx_t_2 = (__pyx_v_self->breakpoints_hit_at_lines != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (((PyObject *)__pyx_v_self->code_line_info) != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_3 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_self->code_lines_as_set != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->code_obj_py != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.breakpoints_hit_at_lines is not None or self.code_line_info is not None or self.code_lines_as_set is not None or self.code_obj_py is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self.breakpoints_hit_at_lines is not None or self.code_line_info is not None or self.code_lines_as_set is not None or self.code_obj_py is not None + * if use_setstate: + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle__CacheValue); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64258489); + __Pyx_GIVEREF(__pyx_int_64258489); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64258489); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.breakpoints_hit_at_lines is not None or self.code_line_info is not None or 
self.code_lines_as_set is not None or self.code_obj_py is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, None), state + * else: + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle__CacheValue__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle__CacheValue); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64258489); + __Pyx_GIVEREF(__pyx_int_64258489); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64258489); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_6 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle__CacheValue__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_6__setstate_cython__(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_6__setstate_cython__(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + 
PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle__CacheValue__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CacheValue__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle__CacheValue, (type(self), 0x3d481b9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle__CacheValue__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":401 + * return breakpoint_found, force_stay_in_untraced_mode + * + * def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): # <<<<<<<<<<<<<< + * return generate_code_with_breakpoints(code_obj_py, breakpoints) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_15generate_code_with_breakpoints_py(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_15generate_code_with_breakpoints_py = {"generate_code_with_breakpoints_py", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_15generate_code_with_breakpoints_py, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_15generate_code_with_breakpoints_py(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_code_obj_py = 0; + PyObject *__pyx_v_breakpoints = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("generate_code_with_breakpoints_py (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_code_obj_py,&__pyx_n_s_breakpoints,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_code_obj_py)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + 
CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_breakpoints)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("generate_code_with_breakpoints_py", 1, 2, 2, 1); __PYX_ERR(0, 401, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "generate_code_with_breakpoints_py") < 0)) __PYX_ERR(0, 401, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_code_obj_py = values[0]; + __pyx_v_breakpoints = ((PyObject*)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("generate_code_with_breakpoints_py", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 401, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.generate_code_with_breakpoints_py", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_breakpoints), (&PyDict_Type), 1, "breakpoints", 1))) __PYX_ERR(0, 401, __pyx_L1_error) + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_14generate_code_with_breakpoints_py(__pyx_self, __pyx_v_code_obj_py, __pyx_v_breakpoints); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_14generate_code_with_breakpoints_py(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_code_obj_py, PyObject *__pyx_v_breakpoints) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("generate_code_with_breakpoints_py", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":402 + * + * def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): + * return generate_code_with_breakpoints(code_obj_py, breakpoints) # <<<<<<<<<<<<<< + * + * # DEBUG = True + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_generate_code_with_breakpoints(__pyx_v_code_obj_py, __pyx_v_breakpoints); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 402, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":401 + * return breakpoint_found, force_stay_in_untraced_mode + * + * def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): # <<<<<<<<<<<<<< + * return generate_code_with_breakpoints(code_obj_py, breakpoints) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.generate_code_with_breakpoints_py", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":407 + * # debug_helper = DebugHelper() + * + * cdef generate_code_with_breakpoints(object code_obj_py, dict breakpoints): # <<<<<<<<<<<<<< + * ''' + * :param breakpoints: + */ + +static 
PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_generate_code_with_breakpoints(PyObject *__pyx_v_code_obj_py, PyObject *__pyx_v_breakpoints) { + int __pyx_v_success; + int __pyx_v_breakpoint_line; + int __pyx_v_breakpoint_found; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v_cache_value = 0; + PyObject *__pyx_v_breakpoints_hit_at_lines = 0; + PyObject *__pyx_v_line_to_offset = 0; + PyObject *__pyx_v_code_line_info = NULL; + PyObject *__pyx_v_new_code = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + Py_ssize_t __pyx_t_5; + Py_ssize_t __pyx_t_6; + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + PyObject *(*__pyx_t_12)(PyObject *); + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("generate_code_with_breakpoints", 0); + __Pyx_INCREF(__pyx_v_code_obj_py); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":424 + * cdef dict line_to_offset + * + * assert code_obj_py not in _cache, 'If a code object is cached, that same code object must be reused.' # <<<<<<<<<<<<<< + * + * # if DEBUG: + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(!Py_OptimizeFlag)) { + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_cache); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_v_code_obj_py, __pyx_t_1, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!(__pyx_t_2 != 0))) { + PyErr_SetObject(PyExc_AssertionError, __pyx_kp_s_If_a_code_object_is_cached_that); + __PYX_ERR(0, 424, __pyx_L1_error) + } + } + #endif + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":429 + * # initial_code_obj_py = code_obj_py + * + * code_line_info = _get_code_line_info(code_obj_py) # <<<<<<<<<<<<<< + * + * success = True + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_get_code_line_info); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_code_obj_py) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_code_obj_py); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_code_line_info = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":431 + * code_line_info = _get_code_line_info(code_obj_py) + * + * success = True # <<<<<<<<<<<<<< + * + * breakpoints_hit_at_lines = set() + */ + __pyx_v_success = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":433 + * success = True + * + * breakpoints_hit_at_lines = set() # <<<<<<<<<<<<<< + * line_to_offset = code_line_info.line_to_offset + * + */ + __pyx_t_1 = PySet_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_breakpoints_hit_at_lines = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":434 + * + * breakpoints_hit_at_lines = set() + * line_to_offset = code_line_info.line_to_offset # <<<<<<<<<<<<<< + * + * for breakpoint_line in breakpoints: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_code_line_info, __pyx_n_s_line_to_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 434, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 434, __pyx_L1_error) + __pyx_v_line_to_offset = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":436 + * line_to_offset = code_line_info.line_to_offset + * + * for breakpoint_line in breakpoints: # <<<<<<<<<<<<<< + * if breakpoint_line in line_to_offset: + * breakpoints_hit_at_lines.add(breakpoint_line) + */ + __pyx_t_5 = 0; + if (unlikely(__pyx_v_breakpoints == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 436, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_breakpoints, 1, ((PyObject *)NULL), (&__pyx_t_6), (&__pyx_t_7)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 436, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_1); + __pyx_t_1 = __pyx_t_3; + __pyx_t_3 = 0; + while (1) { + __pyx_t_8 = __Pyx_dict_iter_next(__pyx_t_1, __pyx_t_6, &__pyx_t_5, &__pyx_t_3, NULL, NULL, __pyx_t_7); + if (unlikely(__pyx_t_8 == 0)) break; + if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 436, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 436, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_breakpoint_line = __pyx_t_8; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":437 + * + * for breakpoint_line in breakpoints: + * if breakpoint_line in line_to_offset: # <<<<<<<<<<<<<< + * breakpoints_hit_at_lines.add(breakpoint_line) + * + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_breakpoint_line); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(__pyx_v_line_to_offset == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 437, __pyx_L1_error) + } + __pyx_t_2 = (__Pyx_PyDict_ContainsTF(__pyx_t_3, __pyx_v_line_to_offset, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 437, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + 
__pyx_t_9 = (__pyx_t_2 != 0); + if (__pyx_t_9) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":438 + * for breakpoint_line in breakpoints: + * if breakpoint_line in line_to_offset: + * breakpoints_hit_at_lines.add(breakpoint_line) # <<<<<<<<<<<<<< + * + * if breakpoints_hit_at_lines: + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_breakpoint_line); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_10 = PySet_Add(__pyx_v_breakpoints_hit_at_lines, __pyx_t_3); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":437 + * + * for breakpoint_line in breakpoints: + * if breakpoint_line in line_to_offset: # <<<<<<<<<<<<<< + * breakpoints_hit_at_lines.add(breakpoint_line) + * + */ + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":440 + * breakpoints_hit_at_lines.add(breakpoint_line) + * + * if breakpoints_hit_at_lines: # <<<<<<<<<<<<<< + * success, new_code = insert_pydevd_breaks( + * code_obj_py, + */ + __pyx_t_9 = (PySet_GET_SIZE(__pyx_v_breakpoints_hit_at_lines) != 0); + if (__pyx_t_9) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":441 + * + * if breakpoints_hit_at_lines: + * success, new_code = insert_pydevd_breaks( # <<<<<<<<<<<<<< + * code_obj_py, + * breakpoints_hit_at_lines, + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_insert_pydevd_breaks); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":444 + * code_obj_py, + * breakpoints_hit_at_lines, + * code_line_info # <<<<<<<<<<<<<< + * ) + * + */ + __pyx_t_4 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_code_obj_py, __pyx_v_breakpoints_hit_at_lines, __pyx_v_code_line_info}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_code_obj_py, __pyx_v_breakpoints_hit_at_lines, __pyx_v_code_line_info}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_11 = PyTuple_New(3+__pyx_t_7); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_code_obj_py); + __Pyx_GIVEREF(__pyx_v_code_obj_py); + PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_7, __pyx_v_code_obj_py); + __Pyx_INCREF(__pyx_v_breakpoints_hit_at_lines); + __Pyx_GIVEREF(__pyx_v_breakpoints_hit_at_lines); + PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_7, __pyx_v_breakpoints_hit_at_lines); + 
__Pyx_INCREF(__pyx_v_code_line_info); + __Pyx_GIVEREF(__pyx_v_code_line_info); + PyTuple_SET_ITEM(__pyx_t_11, 2+__pyx_t_7, __pyx_v_code_line_info); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_11, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 441, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_11 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_3 = PyList_GET_ITEM(sequence, 0); + __pyx_t_11 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_11); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_11 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = Py_TYPE(__pyx_t_4)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_12(__pyx_t_4); if (unlikely(!__pyx_t_3)) goto __pyx_L7_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_11 = __pyx_t_12(__pyx_t_4); if (unlikely(!__pyx_t_11)) goto __pyx_L7_unpacking_failed; + __Pyx_GOTREF(__pyx_t_11); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_12(__pyx_t_4), 2) < 0) __PYX_ERR(0, 441, __pyx_L1_error) + __pyx_t_12 = NULL; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L8_unpacking_done; + __pyx_L7_unpacking_failed:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_12 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 441, __pyx_L1_error) + __pyx_L8_unpacking_done:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":441 + * + * if breakpoints_hit_at_lines: + * success, new_code = insert_pydevd_breaks( # <<<<<<<<<<<<<< + * code_obj_py, + * breakpoints_hit_at_lines, + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 441, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_success = __pyx_t_9; + __pyx_v_new_code = __pyx_t_11; + __pyx_t_11 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":447 + * ) + * + * if not success: # <<<<<<<<<<<<<< + * code_obj_py = None + * else: + */ + __pyx_t_9 = ((!(__pyx_v_success != 0)) != 0); + if (__pyx_t_9) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":448 + * + * if not success: + * code_obj_py = None # <<<<<<<<<<<<<< + * else: + * code_obj_py = new_code + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_code_obj_py, Py_None); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":447 + * ) + * + * if not success: # <<<<<<<<<<<<<< + * code_obj_py = None + * else: + */ + goto __pyx_L9; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":450 + * code_obj_py = None + * else: 
+ * code_obj_py = new_code # <<<<<<<<<<<<<< + * + * breakpoint_found = bool(breakpoints_hit_at_lines) + */ + /*else*/ { + __Pyx_INCREF(__pyx_v_new_code); + __Pyx_DECREF_SET(__pyx_v_code_obj_py, __pyx_v_new_code); + } + __pyx_L9:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":440 + * breakpoints_hit_at_lines.add(breakpoint_line) + * + * if breakpoints_hit_at_lines: # <<<<<<<<<<<<<< + * success, new_code = insert_pydevd_breaks( + * code_obj_py, + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":452 + * code_obj_py = new_code + * + * breakpoint_found = bool(breakpoints_hit_at_lines) # <<<<<<<<<<<<<< + * if breakpoint_found and success: + * # if DEBUG: + */ + __pyx_t_9 = (PySet_GET_SIZE(__pyx_v_breakpoints_hit_at_lines) != 0); + __pyx_v_breakpoint_found = (!(!__pyx_t_9)); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":453 + * + * breakpoint_found = bool(breakpoints_hit_at_lines) + * if breakpoint_found and success: # <<<<<<<<<<<<<< + * # if DEBUG: + * # op_number = debug_helper.write_dis( + */ + __pyx_t_2 = (__pyx_v_breakpoint_found != 0); + if (__pyx_t_2) { + } else { + __pyx_t_9 = __pyx_t_2; + goto __pyx_L11_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_success != 0); + __pyx_t_9 = __pyx_t_2; + __pyx_L11_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":466 + * # ) + * + * cache_value = _CacheValue(code_obj_py, code_line_info, breakpoints_hit_at_lines) # <<<<<<<<<<<<<< + * _cache[code_obj_py] = cache_value + * + */ + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_code_obj_py); + __Pyx_GIVEREF(__pyx_v_code_obj_py); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_code_obj_py); + __Pyx_INCREF(__pyx_v_code_line_info); + __Pyx_GIVEREF(__pyx_v_code_line_info); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_code_line_info); + __Pyx_INCREF(__pyx_v_breakpoints_hit_at_lines); + __Pyx_GIVEREF(__pyx_v_breakpoints_hit_at_lines); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_breakpoints_hit_at_lines); + __pyx_t_11 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue), __pyx_t_1, NULL); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_cache_value = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_t_11); + __pyx_t_11 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":467 + * + * cache_value = _CacheValue(code_obj_py, code_line_info, breakpoints_hit_at_lines) + * _cache[code_obj_py] = cache_value # <<<<<<<<<<<<<< + * + * return breakpoint_found, code_obj_py + */ + __Pyx_GetModuleGlobalName(__pyx_t_11, __pyx_n_s_cache); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 467, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (unlikely(PyObject_SetItem(__pyx_t_11, __pyx_v_code_obj_py, ((PyObject *)__pyx_v_cache_value)) < 0)) __PYX_ERR(0, 467, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":453 + * + * breakpoint_found = bool(breakpoints_hit_at_lines) + * if breakpoint_found and success: # <<<<<<<<<<<<<< + * # if DEBUG: + * # op_number = debug_helper.write_dis( + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":469 + * _cache[code_obj_py] = cache_value + * + * return breakpoint_found, code_obj_py # <<<<<<<<<<<<<< + * + * import sys + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_11 = 
__Pyx_PyBool_FromLong(__pyx_v_breakpoint_found); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 469, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 469, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_11); + __Pyx_INCREF(__pyx_v_code_obj_py); + __Pyx_GIVEREF(__pyx_v_code_obj_py); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_code_obj_py); + __pyx_t_11 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":407 + * # debug_helper = DebugHelper() + * + * cdef generate_code_with_breakpoints(object code_obj_py, dict breakpoints): # <<<<<<<<<<<<<< + * ''' + * :param breakpoints: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.generate_code_with_breakpoints", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_cache_value); + __Pyx_XDECREF(__pyx_v_breakpoints_hit_at_lines); + __Pyx_XDECREF(__pyx_v_line_to_offset); + __Pyx_XDECREF(__pyx_v_code_line_info); + __Pyx_XDECREF(__pyx_v_new_code); + __Pyx_XDECREF(__pyx_v_code_obj_py); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":475 + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_17frame_eval_func(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_17frame_eval_func = {"frame_eval_func", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_17frame_eval_func, METH_NOARGS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_17frame_eval_func(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("frame_eval_func (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_16frame_eval_func(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_16frame_eval_func(CYTHON_UNUSED PyObject *__pyx_self) { + PyThreadState *__pyx_v_state; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("frame_eval_func", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":476 + * + * def frame_eval_func(): + * cdef PyThreadState *state = PyThreadState_Get() # <<<<<<<<<<<<<< + * if IS_PY_39_OWNARDS: + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_39 + */ + __pyx_v_state = PyThreadState_Get(); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":477 + * def frame_eval_func(): + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: # <<<<<<<<<<<<<< + * state.interp.eval_frame = 
<_PyFrameEvalFunction *> get_bytecode_while_frame_eval_39 + * else: + */ + __pyx_t_1 = (__pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator_IS_PY_39_OWNARDS != 0); + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":478 + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_39 # <<<<<<<<<<<<<< + * else: + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_38 + */ + __pyx_v_state->interp->eval_frame = ((_PyFrameEvalFunction *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval_39); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":477 + * def frame_eval_func(): + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: # <<<<<<<<<<<<<< + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_39 + * else: + */ + goto __pyx_L3; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":480 + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_39 + * else: + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_38 # <<<<<<<<<<<<<< + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + * + */ + /*else*/ { + __pyx_v_state->interp->eval_frame = ((_PyFrameEvalFunction *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval_38); + } + __pyx_L3:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":481 + * else: + * state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_38 + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_dummy_tracing_holder); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 481, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_set_trace_func); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 481, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_dummy_trace_dispatch); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 481, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_2 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 481, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":475 + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.frame_eval_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":484 + * + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_19stop_frame_eval(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_19stop_frame_eval = {"stop_frame_eval", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_19stop_frame_eval, METH_NOARGS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_19stop_frame_eval(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("stop_frame_eval (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_18stop_frame_eval(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_18stop_frame_eval(CYTHON_UNUSED PyObject *__pyx_self) { + PyThreadState *__pyx_v_state; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("stop_frame_eval", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":485 + * + * def stop_frame_eval(): + * cdef PyThreadState *state = PyThreadState_Get() # <<<<<<<<<<<<<< + * state.interp.eval_frame = _PyEval_EvalFrameDefault + * + */ + __pyx_v_state = PyThreadState_Get(); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":486 + * def stop_frame_eval(): + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault # <<<<<<<<<<<<<< + * + * # During the build we'll generate 2 versions of the code below so that we're compatible with + */ + __pyx_v_state->interp->eval_frame = _PyEval_EvalFrameDefault; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":484 + * + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":494 + * ### WARNING: GENERATED CODE, DO NOT EDIT! 
+ * ### WARNING: GENERATED CODE, DO NOT EDIT! + * cdef PyObject * get_bytecode_while_frame_eval_38(PyFrameObject * frame_obj, int exc): # <<<<<<<<<<<<<< + * ''' + * This function makes the actual evaluation and changes the bytecode to a version + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval_38(PyFrameObject *__pyx_v_frame_obj, int __pyx_v_exc) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_thread_info = 0; + CYTHON_UNUSED int __pyx_v_STATE_SUSPEND; + int __pyx_v_CMD_STEP_INTO; + int __pyx_v_CMD_STEP_OVER; + int __pyx_v_CMD_STEP_OVER_MY_CODE; + int __pyx_v_CMD_STEP_INTO_MY_CODE; + int __pyx_v_CMD_STEP_INTO_COROUTINE; + int __pyx_v_CMD_SMART_STEP_INTO; + int __pyx_v_can_skip; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_additional_info = 0; + PyObject *__pyx_v_main_debugger = 0; + PyObject *__pyx_v_frame = NULL; + PyObject *__pyx_v_trace_func = NULL; + PyObject *__pyx_v_apply_to_global = NULL; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_func_code_info = 0; + PyObject *__pyx_v_old = NULL; + PyObject *__pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + PyObject *(*__pyx_t_12)(PyObject *); + int __pyx_t_13; + char const *__pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_bytecode_while_frame_eval_38", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":499 + * where programmatic breakpoints are added. 
+ * ''' + * if GlobalDebuggerHolder is None or _thread_local_info is None or exc: # <<<<<<<<<<<<<< + * # Sometimes during process shutdown these global variables become None + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_GlobalDebuggerHolder); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = (__pyx_t_2 == Py_None); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_4 = (__pyx_t_3 != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_thread_local_info); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = (__pyx_t_2 == Py_None); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_3 = (__pyx_t_4 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_exc != 0); + __pyx_t_1 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":501 + * if GlobalDebuggerHolder is None or _thread_local_info is None or exc: + * # Sometimes during process shutdown these global variables become None + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * + * # co_filename: str = frame_obj.f_code.co_filename + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":499 + * where programmatic breakpoints are added. + * ''' + * if GlobalDebuggerHolder is None or _thread_local_info is None or exc: # <<<<<<<<<<<<<< + * # Sometimes during process shutdown these global variables become None + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":508 + * + * cdef ThreadInfo thread_info + * cdef int STATE_SUSPEND = 2 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_INTO = 107 + * cdef int CMD_STEP_OVER = 108 + */ + __pyx_v_STATE_SUSPEND = 2; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":509 + * cdef ThreadInfo thread_info + * cdef int STATE_SUSPEND = 2 + * cdef int CMD_STEP_INTO = 107 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_OVER = 108 + * cdef int CMD_STEP_OVER_MY_CODE = 159 + */ + __pyx_v_CMD_STEP_INTO = 0x6B; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":510 + * cdef int STATE_SUSPEND = 2 + * cdef int CMD_STEP_INTO = 107 + * cdef int CMD_STEP_OVER = 108 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_OVER_MY_CODE = 159 + * cdef int CMD_STEP_INTO_MY_CODE = 144 + */ + __pyx_v_CMD_STEP_OVER = 0x6C; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":511 + * cdef int CMD_STEP_INTO = 107 + * cdef int CMD_STEP_OVER = 108 + * cdef int CMD_STEP_OVER_MY_CODE = 159 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_INTO_MY_CODE = 144 + * cdef int CMD_STEP_INTO_COROUTINE = 206 + */ + __pyx_v_CMD_STEP_OVER_MY_CODE = 0x9F; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":512 + * cdef int CMD_STEP_OVER = 108 + * cdef int CMD_STEP_OVER_MY_CODE = 159 + * cdef int CMD_STEP_INTO_MY_CODE = 144 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_INTO_COROUTINE = 206 + * cdef int CMD_SMART_STEP_INTO = 128 + */ + __pyx_v_CMD_STEP_INTO_MY_CODE = 0x90; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":513 + * cdef int CMD_STEP_OVER_MY_CODE = 159 + * cdef int CMD_STEP_INTO_MY_CODE = 144 + * cdef int CMD_STEP_INTO_COROUTINE = 206 # <<<<<<<<<<<<<< + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + */ + 
__pyx_v_CMD_STEP_INTO_COROUTINE = 0xCE; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":514 + * cdef int CMD_STEP_INTO_MY_CODE = 144 + * cdef int CMD_STEP_INTO_COROUTINE = 206 + * cdef int CMD_SMART_STEP_INTO = 128 # <<<<<<<<<<<<<< + * cdef bint can_skip = True + * try: + */ + __pyx_v_CMD_SMART_STEP_INTO = 0x80; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":515 + * cdef int CMD_STEP_INTO_COROUTINE = 206 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True # <<<<<<<<<<<<<< + * try: + * thread_info = _thread_local_info.thread_info + */ + __pyx_v_can_skip = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":516 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + * try: # <<<<<<<<<<<<<< + * thread_info = _thread_local_info.thread_info + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":517 + * cdef bint can_skip = True + * try: + * thread_info = _thread_local_info.thread_info # <<<<<<<<<<<<<< + * except: + * thread_info = get_thread_info(frame_obj) + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_thread_local_info); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 517, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_thread_info); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 517, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo))))) __PYX_ERR(0, 517, __pyx_L7_error) + __pyx_v_thread_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":516 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + * try: # <<<<<<<<<<<<<< + * thread_info = _thread_local_info.thread_info + * except: + */ + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L12_try_end; + __pyx_L7_error:; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":518 + * try: + * thread_info = _thread_local_info.thread_info + * except: # <<<<<<<<<<<<<< + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval_38", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_2, &__pyx_t_9) < 0) __PYX_ERR(0, 518, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_9); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":519 + * thread_info = _thread_local_info.thread_info + * except: + * thread_info = get_thread_info(frame_obj) # <<<<<<<<<<<<<< + * if thread_info is None: + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __pyx_t_10 = ((PyObject *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_thread_info(__pyx_v_frame_obj)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 519, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_XDECREF_SET(__pyx_v_thread_info, ((struct 
__pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_10)); + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":520 + * except: + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + */ + __pyx_t_1 = (((PyObject *)__pyx_v_thread_info) == Py_None); + __pyx_t_3 = (__pyx_t_1 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":521 + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * + * if thread_info.inside_frame_eval: + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L10_except_return; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":520 + * except: + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + */ + } + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L8_exception_handled; + } + __pyx_L9_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":516 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + * try: # <<<<<<<<<<<<<< + * thread_info = _thread_local_info.thread_info + * except: + */ + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L10_except_return:; + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L0; + __pyx_L8_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + __pyx_L12_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":523 + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + * if thread_info.inside_frame_eval: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + */ + __pyx_t_3 = (__pyx_v_thread_info->inside_frame_eval != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":524 + * + * if thread_info.inside_frame_eval: + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * + * if not thread_info.fully_initialized: + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":523 + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + * if thread_info.inside_frame_eval: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":526 + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: + */ + __pyx_t_3 = ((!(__pyx_v_thread_info->fully_initialized != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":527 + * + * if not thread_info.fully_initialized: + * thread_info.initialize_if_possible() # <<<<<<<<<<<<<< + * if not thread_info.fully_initialized: + * return 
CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __pyx_t_9 = ((struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_thread_info->__pyx_vtab)->initialize_if_possible(__pyx_v_thread_info); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 527, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":528 + * if not thread_info.fully_initialized: + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + */ + __pyx_t_3 = ((!(__pyx_v_thread_info->fully_initialized != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":529 + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * + * # Can only get additional_info when fully initialized. + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":528 + * if not thread_info.fully_initialized: + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":526 + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":532 + * + * # Can only get additional_info when fully initialized. + * cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info # <<<<<<<<<<<<<< + * if thread_info.is_pydevd_thread or additional_info.is_tracing: + * # Make sure that we don't trace pydevd threads or inside our own calls. + */ + __pyx_t_9 = ((PyObject *)__pyx_v_thread_info->additional_info); + __Pyx_INCREF(__pyx_t_9); + __pyx_v_additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_9); + __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":533 + * # Can only get additional_info when fully initialized. + * cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info + * if thread_info.is_pydevd_thread or additional_info.is_tracing: # <<<<<<<<<<<<<< + * # Make sure that we don't trace pydevd threads or inside our own calls. + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __pyx_t_1 = (__pyx_v_thread_info->is_pydevd_thread != 0); + if (!__pyx_t_1) { + } else { + __pyx_t_3 = __pyx_t_1; + goto __pyx_L20_bool_binop_done; + } + __pyx_t_1 = (__pyx_v_additional_info->is_tracing != 0); + __pyx_t_3 = __pyx_t_1; + __pyx_L20_bool_binop_done:; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":535 + * if thread_info.is_pydevd_thread or additional_info.is_tracing: + * # Make sure that we don't trace pydevd threads or inside our own calls. + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * + * # frame = frame_obj + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":533 + * # Can only get additional_info when fully initialized. 
+ * cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info + * if thread_info.is_pydevd_thread or additional_info.is_tracing: # <<<<<<<<<<<<<< + * # Make sure that we don't trace pydevd threads or inside our own calls. + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":542 + * # print('get_bytecode_while_frame_eval', frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename) + * + * thread_info.inside_frame_eval += 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = True + * try: + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval + 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":543 + * + * thread_info.inside_frame_eval += 1 + * additional_info.is_tracing = True # <<<<<<<<<<<<<< + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg + */ + __pyx_v_additional_info->is_tracing = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":544 + * thread_info.inside_frame_eval += 1 + * additional_info.is_tracing = True + * try: # <<<<<<<<<<<<<< + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: + */ + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":545 + * additional_info.is_tracing = True + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg # <<<<<<<<<<<<<< + * if main_debugger is None: + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_GlobalDebuggerHolder); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 545, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_global_dbg); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 545, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_v_main_debugger = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":546 + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * frame = frame_obj + */ + __pyx_t_3 = (__pyx_v_main_debugger == Py_None); + __pyx_t_1 = (__pyx_t_3 != 0); + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":547 + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * frame = frame_obj + * + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L22_return; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":546 + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * frame = frame_obj + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":548 + * if main_debugger is None: + * return CALL_EvalFrameDefault_38(frame_obj, exc) + * frame = frame_obj # <<<<<<<<<<<<<< + * + * if thread_info.thread_trace_func is None: + */ + __pyx_t_2 = ((PyObject *)__pyx_v_frame_obj); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_frame = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":550 + * frame = frame_obj + * + * if thread_info.thread_trace_func is None: # <<<<<<<<<<<<<< + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: + */ + __pyx_t_1 = (__pyx_v_thread_info->thread_trace_func == Py_None); + 
__pyx_t_3 = (__pyx_t_1 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":551 + * + * if thread_info.thread_trace_func is None: + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) # <<<<<<<<<<<<<< + * if apply_to_global: + * thread_info.thread_trace_func = trace_func + */ + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_fix_top_level_trace_and_get_trac); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_8 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_10 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_11, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_11, __pyx_v_frame); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_10, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_2))) || (PyList_CheckExact(__pyx_t_2))) { + PyObject* sequence = __pyx_t_2; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 551, __pyx_L23_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_9 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_10 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_9 = PyList_GET_ITEM(sequence, 0); + __pyx_t_10 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(__pyx_t_10); + #else + __pyx_t_9 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + #endif + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_8 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_8)) 
__PYX_ERR(0, 551, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_12 = Py_TYPE(__pyx_t_8)->tp_iternext; + index = 0; __pyx_t_9 = __pyx_t_12(__pyx_t_8); if (unlikely(!__pyx_t_9)) goto __pyx_L27_unpacking_failed; + __Pyx_GOTREF(__pyx_t_9); + index = 1; __pyx_t_10 = __pyx_t_12(__pyx_t_8); if (unlikely(!__pyx_t_10)) goto __pyx_L27_unpacking_failed; + __Pyx_GOTREF(__pyx_t_10); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_12(__pyx_t_8), 2) < 0) __PYX_ERR(0, 551, __pyx_L23_error) + __pyx_t_12 = NULL; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L28_unpacking_done; + __pyx_L27_unpacking_failed:; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 551, __pyx_L23_error) + __pyx_L28_unpacking_done:; + } + __pyx_v_trace_func = __pyx_t_9; + __pyx_t_9 = 0; + __pyx_v_apply_to_global = __pyx_t_10; + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":552 + * if thread_info.thread_trace_func is None: + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: # <<<<<<<<<<<<<< + * thread_info.thread_trace_func = trace_func + * + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_apply_to_global); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 552, __pyx_L23_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":553 + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: + * thread_info.thread_trace_func = trace_func # <<<<<<<<<<<<<< + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + */ + __Pyx_INCREF(__pyx_v_trace_func); + __Pyx_GIVEREF(__pyx_v_trace_func); + __Pyx_GOTREF(__pyx_v_thread_info->thread_trace_func); + __Pyx_DECREF(__pyx_v_thread_info->thread_trace_func); + __pyx_v_thread_info->thread_trace_func = __pyx_v_trace_func; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":552 + * if thread_info.thread_trace_func is None: + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: # <<<<<<<<<<<<<< + * thread_info.thread_trace_func = trace_func + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":550 + * frame = frame_obj + * + * if thread_info.thread_trace_func is None: # <<<<<<<<<<<<<< + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":555 + * thread_info.thread_trace_func = trace_func + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + */ + __pyx_t_11 = __pyx_v_additional_info->pydev_step_cmd; + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_STEP_INTO) != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L33_bool_binop_done; + } + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_STEP_INTO_MY_CODE) != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L33_bool_binop_done; + } + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_STEP_INTO_COROUTINE) != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L33_bool_binop_done; + } + __pyx_t_4 = ((__pyx_t_11 == 
__pyx_v_CMD_SMART_STEP_INTO) != 0); + __pyx_t_1 = __pyx_t_4; + __pyx_L33_bool_binop_done:; + __pyx_t_4 = (__pyx_t_1 != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":556 + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + * main_debugger.break_on_caught_exceptions or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_user_uncaught_exceptions or \ + * main_debugger.has_plugin_exception_breaks or \ + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_caught_exceptions); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 556, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 556, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":557 + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ # <<<<<<<<<<<<<< + * main_debugger.has_plugin_exception_breaks or \ + * main_debugger.signature_factory or \ + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_user_uncaught_exception); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 557, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 557, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":558 + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + * main_debugger.has_plugin_exception_breaks or \ # <<<<<<<<<<<<<< + * main_debugger.signature_factory or \ + * additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is additional_info.pydev_step_stop: + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 558, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 558, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":559 + * main_debugger.break_on_user_uncaught_exceptions or \ + * main_debugger.has_plugin_exception_breaks or \ + * main_debugger.signature_factory or \ # <<<<<<<<<<<<<< + * additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is additional_info.pydev_step_stop: + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_signature_factory); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 559, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 559, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = 
__pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":560 + * main_debugger.has_plugin_exception_breaks or \ + * main_debugger.signature_factory or \ + * additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is additional_info.pydev_step_stop: # <<<<<<<<<<<<<< + * + * # if DEBUG: + */ + __pyx_t_11 = __pyx_v_additional_info->pydev_step_cmd; + __pyx_t_1 = ((__pyx_t_11 == __pyx_v_CMD_STEP_OVER) != 0); + if (!__pyx_t_1) { + } else { + __pyx_t_4 = __pyx_t_1; + goto __pyx_L42_bool_binop_done; + } + __pyx_t_1 = ((__pyx_t_11 == __pyx_v_CMD_STEP_OVER_MY_CODE) != 0); + __pyx_t_4 = __pyx_t_1; + __pyx_L42_bool_binop_done:; + __pyx_t_1 = (__pyx_t_4 != 0); + if (__pyx_t_1) { + } else { + __pyx_t_3 = __pyx_t_1; + goto __pyx_L31_bool_binop_done; + } + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_show_return_values); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 560, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 560, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_1) { + } else { + __pyx_t_3 = __pyx_t_1; + goto __pyx_L31_bool_binop_done; + } + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 560, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = (__pyx_t_2 == __pyx_v_additional_info->pydev_step_stop); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_4 = (__pyx_t_1 != 0); + __pyx_t_3 = __pyx_t_4; + __pyx_L31_bool_binop_done:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":555 + * thread_info.thread_trace_func = trace_func + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + */ + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":564 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval enabled trace') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + __pyx_t_3 = (__pyx_v_thread_info->thread_trace_func != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":565 + * # print('get_bytecode_while_frame_eval enabled trace') + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func # <<<<<<<<<<<<<< + * else: + * frame.f_trace = main_debugger.trace_dispatch + */ + __pyx_t_2 = __pyx_v_thread_info->thread_trace_func; + __Pyx_INCREF(__pyx_t_2); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_2) < 0) __PYX_ERR(0, 565, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":564 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval enabled trace') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + goto __pyx_L45; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":567 + * frame.f_trace = thread_info.thread_trace_func + * else: + * frame.f_trace = main_debugger.trace_dispatch # <<<<<<<<<<<<<< + * else: + * func_code_info: FuncCodeInfo = get_func_code_info(thread_info, frame_obj, frame_obj.f_code) + */ 
+ /*else*/ { + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 567, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_10 = __pyx_t_2; + __Pyx_INCREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_10) < 0) __PYX_ERR(0, 567, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __pyx_L45:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":555 + * thread_info.thread_trace_func = trace_func + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + */ + goto __pyx_L30; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":569 + * frame.f_trace = main_debugger.trace_dispatch + * else: + * func_code_info: FuncCodeInfo = get_func_code_info(thread_info, frame_obj, frame_obj.f_code) # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + */ + /*else*/ { + __pyx_t_10 = ((PyObject *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_func_code_info(__pyx_v_thread_info, __pyx_v_frame_obj, __pyx_v_frame_obj->f_code)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 569, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_v_func_code_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_t_10); + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":572 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + */ + __pyx_t_4 = ((!(__pyx_v_func_code_info->always_skip_code != 0)) != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":574 + * if not func_code_info.always_skip_code: + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: # <<<<<<<<<<<<<< + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 574, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 574, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 574, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 574, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_4 = __pyx_t_3; + __pyx_L48_bool_binop_done:; + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":575 + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) # <<<<<<<<<<<<<< + * + * if not can_skip: + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_can_skip); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_2, __pyx_v_main_debugger, ((PyObject *)__pyx_v_frame_obj)}; + __pyx_t_10 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_10); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_2, __pyx_v_main_debugger, ((PyObject *)__pyx_v_frame_obj)}; + __pyx_t_10 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_10); + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_2) { + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_2); __pyx_t_2 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_11, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_frame_obj)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_frame_obj)); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_11, ((PyObject *)__pyx_v_frame_obj)); + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_8, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 575, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_v_can_skip = __pyx_t_4; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":577 + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + * if not can_skip: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + */ + __pyx_t_4 = ((!(__pyx_v_can_skip != 0)) != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":580 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + __pyx_t_4 = (__pyx_v_thread_info->thread_trace_func != Py_None); + __pyx_t_3 = (__pyx_t_4 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":581 + * # print('get_bytecode_while_frame_eval not can_skip') + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func # <<<<<<<<<<<<<< + * else: + * frame.f_trace = main_debugger.trace_dispatch + */ + __pyx_t_10 = __pyx_v_thread_info->thread_trace_func; + 
__Pyx_INCREF(__pyx_t_10); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_10) < 0) __PYX_ERR(0, 581, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":580 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + goto __pyx_L51; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":583 + * frame.f_trace = thread_info.thread_trace_func + * else: + * frame.f_trace = main_debugger.trace_dispatch # <<<<<<<<<<<<<< + * + * if can_skip and func_code_info.breakpoint_found: + */ + /*else*/ { + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 583, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_9 = __pyx_t_10; + __Pyx_INCREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_9) < 0) __PYX_ERR(0, 583, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __pyx_L51:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":577 + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + * if not can_skip: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":574 + * if not func_code_info.always_skip_code: + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: # <<<<<<<<<<<<<< + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":585 + * frame.f_trace = main_debugger.trace_dispatch + * + * if can_skip and func_code_info.breakpoint_found: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + */ + __pyx_t_4 = (__pyx_v_can_skip != 0); + if (__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L53_bool_binop_done; + } + __pyx_t_4 = (__pyx_v_func_code_info->breakpoint_found != 0); + __pyx_t_3 = __pyx_t_4; + __pyx_L53_bool_binop_done:; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":588 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + * if not thread_info.force_stay_in_untraced_mode: # <<<<<<<<<<<<<< + * # If breakpoints are found but new_code is None, + * # this means we weren't able to actually add the code + */ + __pyx_t_3 = ((!(__pyx_v_thread_info->force_stay_in_untraced_mode != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":592 + * # this means we weren't able to actually add the code + * # where needed, so, fallback to tracing. + * if func_code_info.new_code is None: # <<<<<<<<<<<<<< + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func + */ + __pyx_t_3 = (__pyx_v_func_code_info->new_code == Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":593 + * # where needed, so, fallback to tracing. 
+ * if func_code_info.new_code is None: + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + __pyx_t_4 = (__pyx_v_thread_info->thread_trace_func != Py_None); + __pyx_t_3 = (__pyx_t_4 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":594 + * if func_code_info.new_code is None: + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func # <<<<<<<<<<<<<< + * else: + * frame.f_trace = main_debugger.trace_dispatch + */ + __pyx_t_9 = __pyx_v_thread_info->thread_trace_func; + __Pyx_INCREF(__pyx_t_9); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_9) < 0) __PYX_ERR(0, 594, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":593 + * # where needed, so, fallback to tracing. + * if func_code_info.new_code is None: + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + goto __pyx_L57; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":596 + * frame.f_trace = thread_info.thread_trace_func + * else: + * frame.f_trace = main_debugger.trace_dispatch # <<<<<<<<<<<<<< + * else: + * # print('Using frame eval break for', frame_obj.f_code.co_name) + */ + /*else*/ { + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 596, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __pyx_t_9; + __Pyx_INCREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_10) < 0) __PYX_ERR(0, 596, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __pyx_L57:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":592 + * # this means we weren't able to actually add the code + * # where needed, so, fallback to tracing. + * if func_code_info.new_code is None: # <<<<<<<<<<<<<< + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func + */ + goto __pyx_L56; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":599 + * else: + * # print('Using frame eval break for', frame_obj.f_code.co_name) + * update_globals_dict( frame_obj.f_globals) # <<<<<<<<<<<<<< + * Py_INCREF(func_code_info.new_code) + * old = frame_obj.f_code + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 599, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + __pyx_t_10 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_9, __pyx_t_8, ((PyObject *)__pyx_v_frame_obj->f_globals)) : __Pyx_PyObject_CallOneArg(__pyx_t_9, ((PyObject *)__pyx_v_frame_obj->f_globals)); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 599, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":600 + * # print('Using frame eval break for', frame_obj.f_code.co_name) + * update_globals_dict( frame_obj.f_globals) + * Py_INCREF(func_code_info.new_code) # <<<<<<<<<<<<<< + * old = frame_obj.f_code + * frame_obj.f_code = func_code_info.new_code + */ + __pyx_t_10 = __pyx_v_func_code_info->new_code; + __Pyx_INCREF(__pyx_t_10); + Py_INCREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":601 + * update_globals_dict( frame_obj.f_globals) + * Py_INCREF(func_code_info.new_code) + * old = frame_obj.f_code # <<<<<<<<<<<<<< + * frame_obj.f_code = func_code_info.new_code + * Py_DECREF(old) + */ + __pyx_t_10 = ((PyObject *)__pyx_v_frame_obj->f_code); + __Pyx_INCREF(__pyx_t_10); + __pyx_v_old = __pyx_t_10; + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":602 + * Py_INCREF(func_code_info.new_code) + * old = frame_obj.f_code + * frame_obj.f_code = func_code_info.new_code # <<<<<<<<<<<<<< + * Py_DECREF(old) + * else: + */ + __pyx_v_frame_obj->f_code = ((PyCodeObject *)__pyx_v_func_code_info->new_code); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":603 + * old = frame_obj.f_code + * frame_obj.f_code = func_code_info.new_code + * Py_DECREF(old) # <<<<<<<<<<<<<< + * else: + * # When we're forcing to stay in traced mode we need to + */ + Py_DECREF(__pyx_v_old); + } + __pyx_L56:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":588 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + * if not thread_info.force_stay_in_untraced_mode: # <<<<<<<<<<<<<< + * # If breakpoints are found but new_code is None, + * # this means we weren't able to actually add the code + */ + goto __pyx_L55; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":608 + * # update the globals dict (because this means that we're reusing + * # a previous code which had breakpoints added in a new frame). + * update_globals_dict( frame_obj.f_globals) # <<<<<<<<<<<<<< + * + * finally: + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 608, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + __pyx_t_10 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_9, __pyx_t_8, ((PyObject *)__pyx_v_frame_obj->f_globals)) : __Pyx_PyObject_CallOneArg(__pyx_t_9, ((PyObject *)__pyx_v_frame_obj->f_globals)); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 608, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __pyx_L55:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":585 + * frame.f_trace = main_debugger.trace_dispatch + * + * if can_skip and func_code_info.breakpoint_found: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":572 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + */ + } + } + __pyx_L30:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":611 + * + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":612 + * finally: + * thread_info.inside_frame_eval -= 1 + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __pyx_v_additional_info->is_tracing = 0; + goto __pyx_L24; + } + __pyx_L23_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_7 = 0; __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_7, &__pyx_t_6, &__pyx_t_5) < 0)) __Pyx_ErrFetch(&__pyx_t_7, &__pyx_t_6, &__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_11 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":611 + * + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":612 + * finally: + * thread_info.inside_frame_eval -= 1 + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __pyx_v_additional_info->is_tracing = 0; + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ErrRestore(__pyx_t_7, __pyx_t_6, __pyx_t_5); + __pyx_t_7 = 0; __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_11; __pyx_clineno = __pyx_t_13; 
__pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + } + __pyx_L22_return: { + __pyx_t_18 = __pyx_r; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":611 + * + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":612 + * finally: + * thread_info.inside_frame_eval -= 1 + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * + * return CALL_EvalFrameDefault_38(frame_obj, exc) + */ + __pyx_v_additional_info->is_tracing = 0; + __pyx_r = __pyx_t_18; + goto __pyx_L0; + } + __pyx_L24:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":614 + * additional_info.is_tracing = False + * + * return CALL_EvalFrameDefault_38(frame_obj, exc) # <<<<<<<<<<<<<< + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * ### WARNING: GENERATED CODE, DO NOT EDIT! + */ + __pyx_r = CALL_EvalFrameDefault_38(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":494 + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * cdef PyObject * get_bytecode_while_frame_eval_38(PyFrameObject * frame_obj, int exc): # <<<<<<<<<<<<<< + * ''' + * This function makes the actual evaluation and changes the bytecode to a version + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_WriteUnraisable("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval_38", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_thread_info); + __Pyx_XDECREF((PyObject *)__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XDECREF(__pyx_v_trace_func); + __Pyx_XDECREF(__pyx_v_apply_to_global); + __Pyx_XDECREF((PyObject *)__pyx_v_func_code_info); + __Pyx_XDECREF(__pyx_v_old); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":623 + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * ### WARNING: GENERATED CODE, DO NOT EDIT! 
+ * cdef PyObject * get_bytecode_while_frame_eval_39(PyThreadState* tstate, PyFrameObject * frame_obj, int exc): # <<<<<<<<<<<<<< + * ''' + * This function makes the actual evaluation and changes the bytecode to a version + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval_39(PyThreadState *__pyx_v_tstate, PyFrameObject *__pyx_v_frame_obj, int __pyx_v_exc) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v_thread_info = 0; + CYTHON_UNUSED int __pyx_v_STATE_SUSPEND; + int __pyx_v_CMD_STEP_INTO; + int __pyx_v_CMD_STEP_OVER; + int __pyx_v_CMD_STEP_OVER_MY_CODE; + int __pyx_v_CMD_STEP_INTO_MY_CODE; + int __pyx_v_CMD_STEP_INTO_COROUTINE; + int __pyx_v_CMD_SMART_STEP_INTO; + int __pyx_v_can_skip; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_additional_info = 0; + PyObject *__pyx_v_main_debugger = 0; + PyObject *__pyx_v_frame = NULL; + PyObject *__pyx_v_trace_func = NULL; + PyObject *__pyx_v_apply_to_global = NULL; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v_func_code_info = 0; + PyObject *__pyx_v_old = NULL; + PyObject *__pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + PyObject *(*__pyx_t_12)(PyObject *); + int __pyx_t_13; + char const *__pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_bytecode_while_frame_eval_39", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":628 + * where programmatic breakpoints are added. 
+ * ''' + * if GlobalDebuggerHolder is None or _thread_local_info is None or exc: # <<<<<<<<<<<<<< + * # Sometimes during process shutdown these global variables become None + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_GlobalDebuggerHolder); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 628, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = (__pyx_t_2 == Py_None); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_4 = (__pyx_t_3 != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_thread_local_info); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 628, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = (__pyx_t_2 == Py_None); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_3 = (__pyx_t_4 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_exc != 0); + __pyx_t_1 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":630 + * if GlobalDebuggerHolder is None or _thread_local_info is None or exc: + * # Sometimes during process shutdown these global variables become None + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * + * # co_filename: str = frame_obj.f_code.co_filename + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":628 + * where programmatic breakpoints are added. + * ''' + * if GlobalDebuggerHolder is None or _thread_local_info is None or exc: # <<<<<<<<<<<<<< + * # Sometimes during process shutdown these global variables become None + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":637 + * + * cdef ThreadInfo thread_info + * cdef int STATE_SUSPEND = 2 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_INTO = 107 + * cdef int CMD_STEP_OVER = 108 + */ + __pyx_v_STATE_SUSPEND = 2; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":638 + * cdef ThreadInfo thread_info + * cdef int STATE_SUSPEND = 2 + * cdef int CMD_STEP_INTO = 107 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_OVER = 108 + * cdef int CMD_STEP_OVER_MY_CODE = 159 + */ + __pyx_v_CMD_STEP_INTO = 0x6B; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":639 + * cdef int STATE_SUSPEND = 2 + * cdef int CMD_STEP_INTO = 107 + * cdef int CMD_STEP_OVER = 108 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_OVER_MY_CODE = 159 + * cdef int CMD_STEP_INTO_MY_CODE = 144 + */ + __pyx_v_CMD_STEP_OVER = 0x6C; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":640 + * cdef int CMD_STEP_INTO = 107 + * cdef int CMD_STEP_OVER = 108 + * cdef int CMD_STEP_OVER_MY_CODE = 159 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_INTO_MY_CODE = 144 + * cdef int CMD_STEP_INTO_COROUTINE = 206 + */ + __pyx_v_CMD_STEP_OVER_MY_CODE = 0x9F; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":641 + * cdef int CMD_STEP_OVER = 108 + * cdef int CMD_STEP_OVER_MY_CODE = 159 + * cdef int CMD_STEP_INTO_MY_CODE = 144 # <<<<<<<<<<<<<< + * cdef int CMD_STEP_INTO_COROUTINE = 206 + * cdef int CMD_SMART_STEP_INTO = 128 + */ + __pyx_v_CMD_STEP_INTO_MY_CODE = 0x90; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":642 + * cdef int CMD_STEP_OVER_MY_CODE = 159 + * cdef int CMD_STEP_INTO_MY_CODE = 144 + * cdef int CMD_STEP_INTO_COROUTINE = 206 # <<<<<<<<<<<<<< + * cdef int CMD_SMART_STEP_INTO = 
128 + * cdef bint can_skip = True + */ + __pyx_v_CMD_STEP_INTO_COROUTINE = 0xCE; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":643 + * cdef int CMD_STEP_INTO_MY_CODE = 144 + * cdef int CMD_STEP_INTO_COROUTINE = 206 + * cdef int CMD_SMART_STEP_INTO = 128 # <<<<<<<<<<<<<< + * cdef bint can_skip = True + * try: + */ + __pyx_v_CMD_SMART_STEP_INTO = 0x80; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":644 + * cdef int CMD_STEP_INTO_COROUTINE = 206 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True # <<<<<<<<<<<<<< + * try: + * thread_info = _thread_local_info.thread_info + */ + __pyx_v_can_skip = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":645 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + * try: # <<<<<<<<<<<<<< + * thread_info = _thread_local_info.thread_info + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":646 + * cdef bint can_skip = True + * try: + * thread_info = _thread_local_info.thread_info # <<<<<<<<<<<<<< + * except: + * thread_info = get_thread_info(frame_obj) + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_thread_local_info); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 646, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_thread_info); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 646, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo))))) __PYX_ERR(0, 646, __pyx_L7_error) + __pyx_v_thread_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":645 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + * try: # <<<<<<<<<<<<<< + * thread_info = _thread_local_info.thread_info + * except: + */ + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L12_try_end; + __pyx_L7_error:; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":647 + * try: + * thread_info = _thread_local_info.thread_info + * except: # <<<<<<<<<<<<<< + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval_39", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_2, &__pyx_t_9) < 0) __PYX_ERR(0, 647, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_9); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":648 + * thread_info = _thread_local_info.thread_info + * except: + * thread_info = get_thread_info(frame_obj) # <<<<<<<<<<<<<< + * if thread_info is None: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __pyx_t_10 = ((PyObject *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_thread_info(__pyx_v_frame_obj)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 648, __pyx_L9_except_error) + __Pyx_GOTREF(__pyx_t_10); + 
__Pyx_XDECREF_SET(__pyx_v_thread_info, ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_t_10)); + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":649 + * except: + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + */ + __pyx_t_1 = (((PyObject *)__pyx_v_thread_info) == Py_None); + __pyx_t_3 = (__pyx_t_1 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":650 + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * + * if thread_info.inside_frame_eval: + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L10_except_return; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":649 + * except: + * thread_info = get_thread_info(frame_obj) + * if thread_info is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + */ + } + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L8_exception_handled; + } + __pyx_L9_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":645 + * cdef int CMD_SMART_STEP_INTO = 128 + * cdef bint can_skip = True + * try: # <<<<<<<<<<<<<< + * thread_info = _thread_local_info.thread_info + * except: + */ + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L10_except_return:; + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L0; + __pyx_L8_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + __pyx_L12_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":652 + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + * if thread_info.inside_frame_eval: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + */ + __pyx_t_3 = (__pyx_v_thread_info->inside_frame_eval != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":653 + * + * if thread_info.inside_frame_eval: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * + * if not thread_info.fully_initialized: + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":652 + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + * if thread_info.inside_frame_eval: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":655 + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: + */ + __pyx_t_3 = ((!(__pyx_v_thread_info->fully_initialized != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":656 + * + * if not 
thread_info.fully_initialized: + * thread_info.initialize_if_possible() # <<<<<<<<<<<<<< + * if not thread_info.fully_initialized: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __pyx_t_9 = ((struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v_thread_info->__pyx_vtab)->initialize_if_possible(__pyx_v_thread_info); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 656, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":657 + * if not thread_info.fully_initialized: + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + */ + __pyx_t_3 = ((!(__pyx_v_thread_info->fully_initialized != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":658 + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * + * # Can only get additional_info when fully initialized. + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":657 + * if not thread_info.fully_initialized: + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":655 + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * + * if not thread_info.fully_initialized: # <<<<<<<<<<<<<< + * thread_info.initialize_if_possible() + * if not thread_info.fully_initialized: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":661 + * + * # Can only get additional_info when fully initialized. + * cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info # <<<<<<<<<<<<<< + * if thread_info.is_pydevd_thread or additional_info.is_tracing: + * # Make sure that we don't trace pydevd threads or inside our own calls. + */ + __pyx_t_9 = ((PyObject *)__pyx_v_thread_info->additional_info); + __Pyx_INCREF(__pyx_t_9); + __pyx_v_additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_9); + __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":662 + * # Can only get additional_info when fully initialized. + * cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info + * if thread_info.is_pydevd_thread or additional_info.is_tracing: # <<<<<<<<<<<<<< + * # Make sure that we don't trace pydevd threads or inside our own calls. + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __pyx_t_1 = (__pyx_v_thread_info->is_pydevd_thread != 0); + if (!__pyx_t_1) { + } else { + __pyx_t_3 = __pyx_t_1; + goto __pyx_L20_bool_binop_done; + } + __pyx_t_1 = (__pyx_v_additional_info->is_tracing != 0); + __pyx_t_3 = __pyx_t_1; + __pyx_L20_bool_binop_done:; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":664 + * if thread_info.is_pydevd_thread or additional_info.is_tracing: + * # Make sure that we don't trace pydevd threads or inside our own calls. 
+ * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * + * # frame = frame_obj + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":662 + * # Can only get additional_info when fully initialized. + * cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info + * if thread_info.is_pydevd_thread or additional_info.is_tracing: # <<<<<<<<<<<<<< + * # Make sure that we don't trace pydevd threads or inside our own calls. + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":671 + * # print('get_bytecode_while_frame_eval', frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename) + * + * thread_info.inside_frame_eval += 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = True + * try: + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval + 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":672 + * + * thread_info.inside_frame_eval += 1 + * additional_info.is_tracing = True # <<<<<<<<<<<<<< + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg + */ + __pyx_v_additional_info->is_tracing = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":673 + * thread_info.inside_frame_eval += 1 + * additional_info.is_tracing = True + * try: # <<<<<<<<<<<<<< + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: + */ + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":674 + * additional_info.is_tracing = True + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg # <<<<<<<<<<<<<< + * if main_debugger is None: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_GlobalDebuggerHolder); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 674, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_global_dbg); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 674, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_v_main_debugger = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":675 + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * frame = frame_obj + */ + __pyx_t_3 = (__pyx_v_main_debugger == Py_None); + __pyx_t_1 = (__pyx_t_3 != 0); + if (__pyx_t_1) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":676 + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * frame = frame_obj + * + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L22_return; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":675 + * try: + * main_debugger: object = GlobalDebuggerHolder.global_dbg + * if main_debugger is None: # <<<<<<<<<<<<<< + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * frame = frame_obj + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":677 + * if main_debugger is None: + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + * frame = frame_obj # <<<<<<<<<<<<<< + * + * if thread_info.thread_trace_func is None: + */ + __pyx_t_2 = ((PyObject *)__pyx_v_frame_obj); + 
__Pyx_INCREF(__pyx_t_2); + __pyx_v_frame = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":679 + * frame = frame_obj + * + * if thread_info.thread_trace_func is None: # <<<<<<<<<<<<<< + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: + */ + __pyx_t_1 = (__pyx_v_thread_info->thread_trace_func == Py_None); + __pyx_t_3 = (__pyx_t_1 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":680 + * + * if thread_info.thread_trace_func is None: + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) # <<<<<<<<<<<<<< + * if apply_to_global: + * thread_info.thread_trace_func = trace_func + */ + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_fix_top_level_trace_and_get_trac); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_8 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_10 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_11, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_11, __pyx_v_frame); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_10, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_2))) || (PyList_CheckExact(__pyx_t_2))) { + PyObject* sequence = __pyx_t_2; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 680, __pyx_L23_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_9 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_10 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_9 = PyList_GET_ITEM(sequence, 0); + __pyx_t_10 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(__pyx_t_10); + #else + __pyx_t_9 = 
PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + #endif + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_8 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 680, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_12 = Py_TYPE(__pyx_t_8)->tp_iternext; + index = 0; __pyx_t_9 = __pyx_t_12(__pyx_t_8); if (unlikely(!__pyx_t_9)) goto __pyx_L27_unpacking_failed; + __Pyx_GOTREF(__pyx_t_9); + index = 1; __pyx_t_10 = __pyx_t_12(__pyx_t_8); if (unlikely(!__pyx_t_10)) goto __pyx_L27_unpacking_failed; + __Pyx_GOTREF(__pyx_t_10); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_12(__pyx_t_8), 2) < 0) __PYX_ERR(0, 680, __pyx_L23_error) + __pyx_t_12 = NULL; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L28_unpacking_done; + __pyx_L27_unpacking_failed:; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 680, __pyx_L23_error) + __pyx_L28_unpacking_done:; + } + __pyx_v_trace_func = __pyx_t_9; + __pyx_t_9 = 0; + __pyx_v_apply_to_global = __pyx_t_10; + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":681 + * if thread_info.thread_trace_func is None: + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: # <<<<<<<<<<<<<< + * thread_info.thread_trace_func = trace_func + * + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_apply_to_global); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 681, __pyx_L23_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":682 + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: + * thread_info.thread_trace_func = trace_func # <<<<<<<<<<<<<< + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + */ + __Pyx_INCREF(__pyx_v_trace_func); + __Pyx_GIVEREF(__pyx_v_trace_func); + __Pyx_GOTREF(__pyx_v_thread_info->thread_trace_func); + __Pyx_DECREF(__pyx_v_thread_info->thread_trace_func); + __pyx_v_thread_info->thread_trace_func = __pyx_v_trace_func; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":681 + * if thread_info.thread_trace_func is None: + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: # <<<<<<<<<<<<<< + * thread_info.thread_trace_func = trace_func + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":679 + * frame = frame_obj + * + * if thread_info.thread_trace_func is None: # <<<<<<<<<<<<<< + * trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + * if apply_to_global: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":684 + * thread_info.thread_trace_func = trace_func + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + */ + __pyx_t_11 = __pyx_v_additional_info->pydev_step_cmd; + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_STEP_INTO) != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = 
__pyx_t_4; + goto __pyx_L33_bool_binop_done; + } + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_STEP_INTO_MY_CODE) != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L33_bool_binop_done; + } + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_STEP_INTO_COROUTINE) != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_1 = __pyx_t_4; + goto __pyx_L33_bool_binop_done; + } + __pyx_t_4 = ((__pyx_t_11 == __pyx_v_CMD_SMART_STEP_INTO) != 0); + __pyx_t_1 = __pyx_t_4; + __pyx_L33_bool_binop_done:; + __pyx_t_4 = (__pyx_t_1 != 0); + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":685 + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + * main_debugger.break_on_caught_exceptions or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_user_uncaught_exceptions or \ + * main_debugger.has_plugin_exception_breaks or \ + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_caught_exceptions); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 685, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 685, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":686 + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ # <<<<<<<<<<<<<< + * main_debugger.has_plugin_exception_breaks or \ + * main_debugger.signature_factory or \ + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_user_uncaught_exception); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 686, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 686, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":687 + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + * main_debugger.has_plugin_exception_breaks or \ # <<<<<<<<<<<<<< + * main_debugger.signature_factory or \ + * additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is additional_info.pydev_step_stop: + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 687, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 687, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":688 + * main_debugger.break_on_user_uncaught_exceptions or \ + * main_debugger.has_plugin_exception_breaks or \ + * main_debugger.signature_factory or \ # <<<<<<<<<<<<<< + * additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is 
additional_info.pydev_step_stop: + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_signature_factory); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 688, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 688, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (!__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L31_bool_binop_done; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":689 + * main_debugger.has_plugin_exception_breaks or \ + * main_debugger.signature_factory or \ + * additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is additional_info.pydev_step_stop: # <<<<<<<<<<<<<< + * + * # if DEBUG: + */ + __pyx_t_11 = __pyx_v_additional_info->pydev_step_cmd; + __pyx_t_1 = ((__pyx_t_11 == __pyx_v_CMD_STEP_OVER) != 0); + if (!__pyx_t_1) { + } else { + __pyx_t_4 = __pyx_t_1; + goto __pyx_L42_bool_binop_done; + } + __pyx_t_1 = ((__pyx_t_11 == __pyx_v_CMD_STEP_OVER_MY_CODE) != 0); + __pyx_t_4 = __pyx_t_1; + __pyx_L42_bool_binop_done:; + __pyx_t_1 = (__pyx_t_4 != 0); + if (__pyx_t_1) { + } else { + __pyx_t_3 = __pyx_t_1; + goto __pyx_L31_bool_binop_done; + } + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_show_return_values); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 689, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 689, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_1) { + } else { + __pyx_t_3 = __pyx_t_1; + goto __pyx_L31_bool_binop_done; + } + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 689, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = (__pyx_t_2 == __pyx_v_additional_info->pydev_step_stop); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_4 = (__pyx_t_1 != 0); + __pyx_t_3 = __pyx_t_4; + __pyx_L31_bool_binop_done:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":684 + * thread_info.thread_trace_func = trace_func + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + */ + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":693 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval enabled trace') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + __pyx_t_3 = (__pyx_v_thread_info->thread_trace_func != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":694 + * # print('get_bytecode_while_frame_eval enabled trace') + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func # <<<<<<<<<<<<<< + * else: + * frame.f_trace = main_debugger.trace_dispatch + */ + __pyx_t_2 = __pyx_v_thread_info->thread_trace_func; + __Pyx_INCREF(__pyx_t_2); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_2) < 0) __PYX_ERR(0, 694, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":693 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval enabled trace') + * if 
thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + goto __pyx_L45; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":696 + * frame.f_trace = thread_info.thread_trace_func + * else: + * frame.f_trace = main_debugger.trace_dispatch # <<<<<<<<<<<<<< + * else: + * func_code_info: FuncCodeInfo = get_func_code_info(thread_info, frame_obj, frame_obj.f_code) + */ + /*else*/ { + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 696, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_10 = __pyx_t_2; + __Pyx_INCREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_10) < 0) __PYX_ERR(0, 696, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __pyx_L45:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":684 + * thread_info.thread_trace_func = trace_func + * + * if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ # <<<<<<<<<<<<<< + * main_debugger.break_on_caught_exceptions or \ + * main_debugger.break_on_user_uncaught_exceptions or \ + */ + goto __pyx_L30; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":698 + * frame.f_trace = main_debugger.trace_dispatch + * else: + * func_code_info: FuncCodeInfo = get_func_code_info(thread_info, frame_obj, frame_obj.f_code) # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + */ + /*else*/ { + __pyx_t_10 = ((PyObject *)__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_func_code_info(__pyx_v_thread_info, __pyx_v_frame_obj, __pyx_v_frame_obj->f_code)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 698, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_v_func_code_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_t_10); + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":701 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + */ + __pyx_t_4 = ((!(__pyx_v_func_code_info->always_skip_code != 0)) != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":703 + * if not func_code_info.always_skip_code: + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: # <<<<<<<<<<<<<< + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 703, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 703, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 703, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 703, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 
0; + __pyx_t_4 = __pyx_t_3; + __pyx_L48_bool_binop_done:; + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":704 + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) # <<<<<<<<<<<<<< + * + * if not can_skip: + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_can_skip); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_2, __pyx_v_main_debugger, ((PyObject *)__pyx_v_frame_obj)}; + __pyx_t_10 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_10); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_2, __pyx_v_main_debugger, ((PyObject *)__pyx_v_frame_obj)}; + __pyx_t_10 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_10); + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_2) { + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_2); __pyx_t_2 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_11, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_frame_obj)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_frame_obj)); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_11, ((PyObject *)__pyx_v_frame_obj)); + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_8, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 704, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_v_can_skip = __pyx_t_4; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":706 + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + * if not can_skip: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + */ + __pyx_t_4 = ((!(__pyx_v_can_skip != 0)) != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":709 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + __pyx_t_4 = 
(__pyx_v_thread_info->thread_trace_func != Py_None); + __pyx_t_3 = (__pyx_t_4 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":710 + * # print('get_bytecode_while_frame_eval not can_skip') + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func # <<<<<<<<<<<<<< + * else: + * frame.f_trace = main_debugger.trace_dispatch + */ + __pyx_t_10 = __pyx_v_thread_info->thread_trace_func; + __Pyx_INCREF(__pyx_t_10); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_10) < 0) __PYX_ERR(0, 710, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":709 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + goto __pyx_L51; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":712 + * frame.f_trace = thread_info.thread_trace_func + * else: + * frame.f_trace = main_debugger.trace_dispatch # <<<<<<<<<<<<<< + * + * if can_skip and func_code_info.breakpoint_found: + */ + /*else*/ { + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 712, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_9 = __pyx_t_10; + __Pyx_INCREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_9) < 0) __PYX_ERR(0, 712, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __pyx_L51:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":706 + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + * if not can_skip: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval not can_skip') + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":703 + * if not func_code_info.always_skip_code: + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: # <<<<<<<<<<<<<< + * can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":714 + * frame.f_trace = main_debugger.trace_dispatch + * + * if can_skip and func_code_info.breakpoint_found: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + */ + __pyx_t_4 = (__pyx_v_can_skip != 0); + if (__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L53_bool_binop_done; + } + __pyx_t_4 = (__pyx_v_func_code_info->breakpoint_found != 0); + __pyx_t_3 = __pyx_t_4; + __pyx_L53_bool_binop_done:; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":717 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + * if not thread_info.force_stay_in_untraced_mode: # <<<<<<<<<<<<<< + * # If breakpoints are found but new_code is None, + * # this means we weren't able to actually add the code + */ + __pyx_t_3 = ((!(__pyx_v_thread_info->force_stay_in_untraced_mode != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":721 + * # this means we weren't able to actually add the code + * # where needed, so, fallback to tracing. 
+ * if func_code_info.new_code is None: # <<<<<<<<<<<<<< + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func + */ + __pyx_t_3 = (__pyx_v_func_code_info->new_code == Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":722 + * # where needed, so, fallback to tracing. + * if func_code_info.new_code is None: + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + __pyx_t_4 = (__pyx_v_thread_info->thread_trace_func != Py_None); + __pyx_t_3 = (__pyx_t_4 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":723 + * if func_code_info.new_code is None: + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func # <<<<<<<<<<<<<< + * else: + * frame.f_trace = main_debugger.trace_dispatch + */ + __pyx_t_9 = __pyx_v_thread_info->thread_trace_func; + __Pyx_INCREF(__pyx_t_9); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_9) < 0) __PYX_ERR(0, 723, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":722 + * # where needed, so, fallback to tracing. + * if func_code_info.new_code is None: + * if thread_info.thread_trace_func is not None: # <<<<<<<<<<<<<< + * frame.f_trace = thread_info.thread_trace_func + * else: + */ + goto __pyx_L57; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":725 + * frame.f_trace = thread_info.thread_trace_func + * else: + * frame.f_trace = main_debugger.trace_dispatch # <<<<<<<<<<<<<< + * else: + * # print('Using frame eval break for', frame_obj.f_code.co_name) + */ + /*else*/ { + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 725, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __pyx_t_9; + __Pyx_INCREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_10) < 0) __PYX_ERR(0, 725, __pyx_L23_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __pyx_L57:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":721 + * # this means we weren't able to actually add the code + * # where needed, so, fallback to tracing. + * if func_code_info.new_code is None: # <<<<<<<<<<<<<< + * if thread_info.thread_trace_func is not None: + * frame.f_trace = thread_info.thread_trace_func + */ + goto __pyx_L56; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":728 + * else: + * # print('Using frame eval break for', frame_obj.f_code.co_name) + * update_globals_dict( frame_obj.f_globals) # <<<<<<<<<<<<<< + * Py_INCREF(func_code_info.new_code) + * old = frame_obj.f_code + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 728, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + __pyx_t_10 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_9, __pyx_t_8, ((PyObject *)__pyx_v_frame_obj->f_globals)) : __Pyx_PyObject_CallOneArg(__pyx_t_9, ((PyObject *)__pyx_v_frame_obj->f_globals)); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 728, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":729 + * # print('Using frame eval break for', frame_obj.f_code.co_name) + * update_globals_dict( frame_obj.f_globals) + * Py_INCREF(func_code_info.new_code) # <<<<<<<<<<<<<< + * old = frame_obj.f_code + * frame_obj.f_code = func_code_info.new_code + */ + __pyx_t_10 = __pyx_v_func_code_info->new_code; + __Pyx_INCREF(__pyx_t_10); + Py_INCREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":730 + * update_globals_dict( frame_obj.f_globals) + * Py_INCREF(func_code_info.new_code) + * old = frame_obj.f_code # <<<<<<<<<<<<<< + * frame_obj.f_code = func_code_info.new_code + * Py_DECREF(old) + */ + __pyx_t_10 = ((PyObject *)__pyx_v_frame_obj->f_code); + __Pyx_INCREF(__pyx_t_10); + __pyx_v_old = __pyx_t_10; + __pyx_t_10 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":731 + * Py_INCREF(func_code_info.new_code) + * old = frame_obj.f_code + * frame_obj.f_code = func_code_info.new_code # <<<<<<<<<<<<<< + * Py_DECREF(old) + * else: + */ + __pyx_v_frame_obj->f_code = ((PyCodeObject *)__pyx_v_func_code_info->new_code); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":732 + * old = frame_obj.f_code + * frame_obj.f_code = func_code_info.new_code + * Py_DECREF(old) # <<<<<<<<<<<<<< + * else: + * # When we're forcing to stay in traced mode we need to + */ + Py_DECREF(__pyx_v_old); + } + __pyx_L56:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":717 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + * if not thread_info.force_stay_in_untraced_mode: # <<<<<<<<<<<<<< + * # If breakpoints are found but new_code is None, + * # this means we weren't able to actually add the code + */ + goto __pyx_L55; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":737 + * # update the globals dict (because this means that we're reusing + * # a previous code which had breakpoints added in a new frame). + * update_globals_dict( frame_obj.f_globals) # <<<<<<<<<<<<<< + * + * finally: + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 737, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + __pyx_t_10 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_9, __pyx_t_8, ((PyObject *)__pyx_v_frame_obj->f_globals)) : __Pyx_PyObject_CallOneArg(__pyx_t_9, ((PyObject *)__pyx_v_frame_obj->f_globals)); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 737, __pyx_L23_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __pyx_L55:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":714 + * frame.f_trace = main_debugger.trace_dispatch + * + * if can_skip and func_code_info.breakpoint_found: # <<<<<<<<<<<<<< + * # if DEBUG: + * # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":701 + * # if DEBUG: + * # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + * if not func_code_info.always_skip_code: # <<<<<<<<<<<<<< + * + * if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + */ + } + } + __pyx_L30:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":740 + * + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":741 + * finally: + * thread_info.inside_frame_eval -= 1 + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __pyx_v_additional_info->is_tracing = 0; + goto __pyx_L24; + } + __pyx_L23_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_7 = 0; __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_7, &__pyx_t_6, &__pyx_t_5) < 0)) __Pyx_ErrFetch(&__pyx_t_7, &__pyx_t_6, &__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_11 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":740 + * + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":741 + * finally: + * thread_info.inside_frame_eval -= 1 + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __pyx_v_additional_info->is_tracing = 0; + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ErrRestore(__pyx_t_7, __pyx_t_6, __pyx_t_5); + __pyx_t_7 = 0; __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_11; __pyx_clineno = 
__pyx_t_13; __pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + } + __pyx_L22_return: { + __pyx_t_18 = __pyx_r; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":740 + * + * finally: + * thread_info.inside_frame_eval -= 1 # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * + */ + __pyx_v_thread_info->inside_frame_eval = (__pyx_v_thread_info->inside_frame_eval - 1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":741 + * finally: + * thread_info.inside_frame_eval -= 1 + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) + */ + __pyx_v_additional_info->is_tracing = 0; + __pyx_r = __pyx_t_18; + goto __pyx_L0; + } + __pyx_L24:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":743 + * additional_info.is_tracing = False + * + * return CALL_EvalFrameDefault_39(tstate, frame_obj, exc) # <<<<<<<<<<<<<< + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * ### WARNING: GENERATED CODE, DO NOT EDIT! + */ + __pyx_r = CALL_EvalFrameDefault_39(__pyx_v_tstate, __pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":623 + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * ### WARNING: GENERATED CODE, DO NOT EDIT! + * cdef PyObject * get_bytecode_while_frame_eval_39(PyThreadState* tstate, PyFrameObject * frame_obj, int exc): # <<<<<<<<<<<<<< + * ''' + * This function makes the actual evaluation and changes the bytecode to a version + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_WriteUnraisable("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval_39", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_thread_info); + __Pyx_XDECREF((PyObject *)__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XDECREF(__pyx_v_trace_func); + __Pyx_XDECREF(__pyx_v_apply_to_global); + __Pyx_XDECREF((PyObject *)__pyx_v_func_code_info); + __Pyx_XDECREF(__pyx_v_old); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_ThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_21__pyx_unpickle_ThreadInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_21__pyx_unpickle_ThreadInfo = {"__pyx_unpickle_ThreadInfo", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_21__pyx_unpickle_ThreadInfo, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_21__pyx_unpickle_ThreadInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadInfo (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if 
(unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadInfo", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadInfo", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_ThreadInfo") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadInfo", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle_ThreadInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_20__pyx_unpickle_ThreadInfo(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_20__pyx_unpickle_ThreadInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadInfo", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x0af4089, 0xe535b68, 0xb8148ba): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, 
force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__6, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x0af4089, 0xe535b68, 0xb8148ba): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + * __pyx_result = ThreadInfo.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x0af4089, 0xe535b68, 0xb8148ba): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = ThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x0af4089, 0xe535b68, 0xb8148ba): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + * __pyx_result = ThreadInfo.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + * __pyx_result = ThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = ThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_ThreadInfo__set_state(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + * __pyx_result = ThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_ThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle_ThreadInfo", 
__pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_ThreadInfo__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadInfo__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[7]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->_can_create_dummy_thread = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->additional_info); + __Pyx_DECREF(((PyObject *)__pyx_v___pyx_result->additional_info)); + __pyx_v___pyx_result->additional_info = ((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->force_stay_in_untraced_mode = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->fully_initialized = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->inside_frame_eval = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->is_pydevd_thread = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->thread_trace_func); + __Pyx_DECREF(__pyx_v___pyx_result->thread_trace_func); + __pyx_v___pyx_result->thread_trace_func = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + * if len(__pyx_state) > 7 and 
hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[7]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 7) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_2 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_2 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[7]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[7]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle_ThreadInfo__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_FuncCodeInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_23__pyx_unpickle_FuncCodeInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_23__pyx_unpickle_FuncCodeInfo = {"__pyx_unpickle_FuncCodeInfo", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_23__pyx_unpickle_FuncCodeInfo, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_23__pyx_unpickle_FuncCodeInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_FuncCodeInfo (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = 
PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FuncCodeInfo", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FuncCodeInfo", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_FuncCodeInfo") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FuncCodeInfo", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle_FuncCodeInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_22__pyx_unpickle_FuncCodeInfo(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_22__pyx_unpickle_FuncCodeInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_FuncCodeInfo", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xb3ee05d, 0x450d2d6, 0x956dcaa): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, 
new_code))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__7, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xb3ee05d, 0x450d2d6, 0x956dcaa): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))" % __pyx_checksum) + * __pyx_result = FuncCodeInfo.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xb3ee05d, 0x450d2d6, 0x956dcaa): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = FuncCodeInfo.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xb3ee05d, 0x450d2d6, 0x956dcaa): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))" % __pyx_checksum) + * __pyx_result = FuncCodeInfo.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))" % __pyx_checksum) + * __pyx_result = FuncCodeInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = FuncCodeInfo.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_FuncCodeInfo__set_state(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0xb3ee05d, 0x450d2d6, 0x956dcaa) = (always_skip_code, breakpoint_found, breakpoints_mtime, canonical_normalized_filename, co_filename, co_name, new_code))" % __pyx_checksum) + * __pyx_result = FuncCodeInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = __pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_FuncCodeInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle_FuncCodeInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + 
__pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = __pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle_FuncCodeInfo__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_FuncCodeInfo__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = __pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[7]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->always_skip_code = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->breakpoint_found = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); 
if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->breakpoints_mtime = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->canonical_normalized_filename); + __Pyx_DECREF(__pyx_v___pyx_result->canonical_normalized_filename); + __pyx_v___pyx_result->canonical_normalized_filename = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->co_filename); + __Pyx_DECREF(__pyx_v___pyx_result->co_filename); + __pyx_v___pyx_result->co_filename = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->co_name); + __Pyx_DECREF(__pyx_v___pyx_result->co_name); + __pyx_v___pyx_result->co_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->new_code); + __Pyx_DECREF(__pyx_v___pyx_result->new_code); + __pyx_v___pyx_result->new_code = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = 
__pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[7]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 7) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_2 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_2 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = __pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[7]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = __pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[7]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_FuncCodeInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_FuncCodeInfo__set_state(FuncCodeInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.always_skip_code = __pyx_state[0]; __pyx_result.breakpoint_found = __pyx_state[1]; __pyx_result.breakpoints_mtime = __pyx_state[2]; __pyx_result.canonical_normalized_filename = __pyx_state[3]; __pyx_result.co_filename = __pyx_state[4]; __pyx_result.co_name = __pyx_state[5]; __pyx_result.new_code = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle_FuncCodeInfo__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle__CodeLineInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_25__pyx_unpickle__CodeLineInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_25__pyx_unpickle__CodeLineInfo = {"__pyx_unpickle__CodeLineInfo", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_25__pyx_unpickle__CodeLineInfo, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_25__pyx_unpickle__CodeLineInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle__CodeLineInfo (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch 
(pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__CodeLineInfo", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__CodeLineInfo", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle__CodeLineInfo") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__CodeLineInfo", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle__CodeLineInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_24__pyx_unpickle__CodeLineInfo(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_24__pyx_unpickle__CodeLineInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle__CodeLineInfo", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3fbbd02, 0x5a9bcd5, 0x0267473): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__8, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x3fbbd02, 0x5a9bcd5, 0x0267473): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) + * __pyx_result = _CodeLineInfo.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x3fbbd02, 0x5a9bcd5, 0x0267473): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = _CodeLineInfo.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_3, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3fbbd02, 0x5a9bcd5, 0x0267473): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) + * __pyx_result = _CodeLineInfo.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) + * __pyx_result = _CodeLineInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = _CodeLineInfo.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CodeLineInfo__set_state(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3fbbd02, 0x5a9bcd5, 0x0267473) = (first_line, last_line, line_to_offset))" % __pyx_checksum) + * __pyx_result = _CodeLineInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle__CodeLineInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle__CodeLineInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, 
__pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CodeLineInfo__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle__CodeLineInfo__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[3]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->first_line = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->last_line = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->line_to_offset); + __Pyx_DECREF(__pyx_v___pyx_result->line_to_offset); + __pyx_v___pyx_result->line_to_offset = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): + * __pyx_result.first_line = 
__pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[3]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 3) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_3) { + + /* "(tree fragment)":14 + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[3]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[3]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle__CodeLineInfo__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle__CacheValue(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_27__pyx_unpickle__CacheValue(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_27__pyx_unpickle__CacheValue = {"__pyx_unpickle__CacheValue", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_27__pyx_unpickle__CacheValue, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_27__pyx_unpickle__CacheValue(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle__CacheValue (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if 
(likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__CacheValue", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__CacheValue", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle__CacheValue") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__CacheValue", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle__CacheValue", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_26__pyx_unpickle__CacheValue(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_26__pyx_unpickle__CacheValue(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle__CacheValue", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d481b9, 0xac42a46, 0xedff7c3): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__9, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if 
__pyx_checksum not in (0x3d481b9, 0xac42a46, 0xedff7c3): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) + * __pyx_result = _CacheValue.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError); + __pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, -1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x3d481b9, 0xac42a46, 0xedff7c3): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = _CacheValue.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_4, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3d481b9, 0xac42a46, 0xedff7c3): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) + * __pyx_result = _CacheValue.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + __pyx_t_4 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result = __pyx_t_4; + __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) + * __pyx_result = _CacheValue.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_3 = (__pyx_v___pyx_state != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = _CacheValue.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_4 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CacheValue__set_state(((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x3d481b9, 0xac42a46, 0xedff7c3) = (breakpoints_hit_at_lines, code_line_info, code_lines_as_set, code_obj_py))" % __pyx_checksum) + * __pyx_result = _CacheValue.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle__CacheValue(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle__CacheValue", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree 
fragment)":11 + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] + * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator___pyx_unpickle__CacheValue__set_state(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle__CacheValue__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[4]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PySet_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->breakpoints_hit_at_lines); + __Pyx_DECREF(__pyx_v___pyx_result->breakpoints_hit_at_lines); + __pyx_v___pyx_result->breakpoints_hit_at_lines = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo))))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->code_line_info); + __Pyx_DECREF(((PyObject *)__pyx_v___pyx_result->code_line_info)); + __pyx_v___pyx_result->code_line_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 
1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PySet_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "set", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->code_lines_as_set); + __Pyx_DECREF(__pyx_v___pyx_result->code_lines_as_set); + __pyx_v___pyx_result->code_lines_as_set = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->code_obj_py); + __Pyx_DECREF(__pyx_v___pyx_result->code_obj_py); + __pyx_v___pyx_result->code_obj_py = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] + * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[4]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 4) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] + * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[4]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + 
__Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] + * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[4]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle__CacheValue__set_state(<_CacheValue> __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__CacheValue__set_state(_CacheValue __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.breakpoints_hit_at_lines = __pyx_state[0]; __pyx_result.code_line_info = __pyx_state[1]; __pyx_result.code_lines_as_set = __pyx_state[2]; __pyx_result.code_obj_py = __pyx_state[3] + * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.__pyx_unpickle__CacheValue__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo __pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo; + +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)o); + p->__pyx_vtab = __pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo; + p->additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)Py_None); Py_INCREF(Py_None); + p->thread_trace_func = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo(PyObject *o) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->additional_info); + Py_CLEAR(p->thread_trace_func); + (*Py_TYPE(o)->tp_free)(o); 
+} + +static int __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)o; + if (p->additional_info) { + e = (*v)(((PyObject *)p->additional_info), a); if (e) return e; + } + if (p->thread_trace_func) { + e = (*v)(p->thread_trace_func, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *)o; + tmp = ((PyObject*)p->additional_info); + p->additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->thread_trace_func); + p->thread_trace_func = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_additional_info(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_additional_info(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_15additional_info_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_is_pydevd_thread(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_is_pydevd_thread(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_16is_pydevd_thread_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_inside_frame_eval(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_inside_frame_eval(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17inside_frame_eval_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_fully_initialized(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_fully_initialized(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17fully_initialized_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_thread_trace_func(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_thread_trace_func(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_17thread_trace_func_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_force_stay_in_untraced_mode(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_force_stay_in_untraced_mode(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_27force_stay_in_untraced_mode_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyMethodDef __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_1__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_3__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo[] = { + {(char *)"additional_info", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_additional_info, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_additional_info, (char *)0, 0}, + {(char *)"is_pydevd_thread", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_is_pydevd_thread, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_is_pydevd_thread, (char *)0, 0}, + {(char *)"inside_frame_eval", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_inside_frame_eval, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_inside_frame_eval, (char *)0, 0}, + {(char *)"fully_initialized", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_fully_initialized, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_fully_initialized, (char *)0, 0}, + {(char *)"thread_trace_func", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_thread_trace_func, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_thread_trace_func, (char *)0, 0}, + {(char *)"force_stay_in_untraced_mode", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_force_stay_in_untraced_mode, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_force_stay_in_untraced_mode, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject 
__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_frame_eval.pydevd_frame_evaluator.ThreadInfo", /*tp_name*/ + sizeof(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo, /*tp_traverse*/ + __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)o); + p->co_filename = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->co_name = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->canonical_normalized_filename = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->new_code = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo(PyObject *o) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if 
(PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->co_filename); + Py_CLEAR(p->co_name); + Py_CLEAR(p->canonical_normalized_filename); + Py_CLEAR(p->new_code); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)o; + if (p->new_code) { + e = (*v)(p->new_code, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo *)o; + tmp = ((PyObject*)p->new_code); + p->new_code = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_filename(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_filename(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_11co_filename_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_name(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_name(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_7co_name_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_canonical_normalized_filename(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_canonical_normalized_filename(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_29canonical_normalized_filename_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoint_found(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoint_found(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_16breakpoint_found_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_new_code(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_new_code(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_8new_code_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoints_mtime(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoints_mtime(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_17breakpoints_mtime_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyMethodDef __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_3__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_5__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo[] = { + {(char *)"co_filename", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_filename, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_filename, (char *)0, 0}, + {(char *)"co_name", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_name, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_co_name, (char *)0, 0}, + {(char *)"canonical_normalized_filename", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_canonical_normalized_filename, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_canonical_normalized_filename, (char *)0, 0}, + {(char *)"breakpoint_found", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoint_found, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoint_found, (char *)0, 0}, + {(char *)"new_code", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_new_code, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_new_code, (char *)0, 0}, + {(char *)"breakpoints_mtime", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoints_mtime, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_breakpoints_mtime, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo = { + PyVarObject_HEAD_INIT(0, 0) + 
"_pydevd_frame_eval.pydevd_frame_evaluator.FuncCodeInfo", /*tp_name*/ + sizeof(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo, /*tp_traverse*/ + __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_12FuncCodeInfo_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)o); + p->line_to_offset = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo(PyObject *o) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->line_to_offset); + (*Py_TYPE(o)->tp_free)(o); +} + +static int 
__pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)o; + if (p->line_to_offset) { + e = (*v)(p->line_to_offset, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)o; + tmp = ((PyObject*)p->line_to_offset); + p->line_to_offset = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_line_to_offset(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_line_to_offset(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_14line_to_offset_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_first_line(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_first_line(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_10first_line_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_last_line(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_last_line(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_9last_line_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyMethodDef __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_3__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_5__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo[] = { + {(char *)"line_to_offset", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_line_to_offset, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_line_to_offset, (char *)0, 0}, + {(char *)"first_line", 
__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_first_line, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_first_line, (char *)0, 0}, + {(char *)"last_line", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_last_line, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_last_line, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_frame_eval.pydevd_frame_evaluator._CodeLineInfo", /*tp_name*/ + sizeof(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, /*tp_traverse*/ + __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_13_CodeLineInfo_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; +static struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue __pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; + +static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue 
*)o); + p->__pyx_vtab = __pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; + p->code_obj_py = Py_None; Py_INCREF(Py_None); + p->code_line_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)Py_None); Py_INCREF(Py_None); + p->breakpoints_hit_at_lines = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->code_lines_as_set = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue(PyObject *o) { + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->code_obj_py); + Py_CLEAR(p->code_line_info); + Py_CLEAR(p->breakpoints_hit_at_lines); + Py_CLEAR(p->code_lines_as_set); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)o; + if (p->code_obj_py) { + e = (*v)(p->code_obj_py, a); if (e) return e; + } + if (p->code_line_info) { + e = (*v)(((PyObject *)p->code_line_info), a); if (e) return e; + } + if (p->breakpoints_hit_at_lines) { + e = (*v)(p->breakpoints_hit_at_lines, a); if (e) return e; + } + if (p->code_lines_as_set) { + e = (*v)(p->code_lines_as_set, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *p = (struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *)o; + tmp = ((PyObject*)p->code_obj_py); + p->code_obj_py = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->code_line_info); + p->code_line_info = ((struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->breakpoints_hit_at_lines); + p->breakpoints_hit_at_lines = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->code_lines_as_set); + p->code_lines_as_set = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_obj_py(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_obj_py(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_11code_obj_py_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_line_info(PyObject *o, CYTHON_UNUSED void *x) { + return 
__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_line_info(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_14code_line_info_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_breakpoints_hit_at_lines(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_breakpoints_hit_at_lines(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_24breakpoints_hit_at_lines_5__del__(o); + } +} + +static PyObject *__pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_lines_as_set(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_1__get__(o); +} + +static int __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_lines_as_set(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_3__set__(o, v); + } + else { + return __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_17code_lines_as_set_5__del__(o); + } +} + +static PyMethodDef __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue[] = { + {"compute_force_stay_in_untraced_mode", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_3compute_force_stay_in_untraced_mode, METH_O, __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_2compute_force_stay_in_untraced_mode}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue[] = { + {(char *)"code_obj_py", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_obj_py, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_obj_py, (char *)0, 0}, + {(char *)"code_line_info", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_line_info, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_line_info, (char *)0, 0}, + {(char *)"breakpoints_hit_at_lines", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_breakpoints_hit_at_lines, __pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_breakpoints_hit_at_lines, (char *)0, 0}, + {(char *)"code_lines_as_set", __pyx_getprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_lines_as_set, 
__pyx_setprop_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_code_lines_as_set, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_frame_eval.pydevd_frame_evaluator._CacheValue", /*tp_name*/ + sizeof(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, /*tp_traverse*/ + __pyx_tp_clear_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_pydevd_frame_evaluator(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_pydevd_frame_evaluator}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "pydevd_frame_evaluator", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && 
(__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_, __pyx_k_, sizeof(__pyx_k_), 0, 0, 1, 0}, + {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, + {&__pyx_n_s_CacheValue, __pyx_k_CacheValue, sizeof(__pyx_k_CacheValue), 0, 0, 1, 1}, + {&__pyx_n_s_CodeLineInfo, __pyx_k_CodeLineInfo, sizeof(__pyx_k_CodeLineInfo), 0, 0, 1, 1}, + {&__pyx_n_s_DebugHelper, __pyx_k_DebugHelper, sizeof(__pyx_k_DebugHelper), 0, 0, 1, 1}, + {&__pyx_n_s_FuncCodeInfo, __pyx_k_FuncCodeInfo, sizeof(__pyx_k_FuncCodeInfo), 0, 0, 1, 1}, + {&__pyx_n_s_GlobalDebuggerHolder, __pyx_k_GlobalDebuggerHolder, sizeof(__pyx_k_GlobalDebuggerHolder), 0, 0, 1, 1}, + {&__pyx_kp_s_If_a_code_object_is_cached_that, __pyx_k_If_a_code_object_is_cached_that, sizeof(__pyx_k_If_a_code_object_is_cached_that), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_k_Incompatible_checksums_0x_x_vs_0_2, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_2), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_3, __pyx_k_Incompatible_checksums_0x_x_vs_0_3, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_3), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_4, __pyx_k_Incompatible_checksums_0x_x_vs_0_4, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_4), 0, 0, 1, 0}, + {&__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_k_NORM_PATHS_AND_BASE_CONTAINER, sizeof(__pyx_k_NORM_PATHS_AND_BASE_CONTAINER), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_SetTrace, __pyx_k_SetTrace, sizeof(__pyx_k_SetTrace), 0, 0, 1, 1}, + {&__pyx_n_s_ThreadInfo, __pyx_k_ThreadInfo, sizeof(__pyx_k_ThreadInfo), 0, 0, 1, 1}, + {&__pyx_kp_s__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 0, 1, 0}, + {&__pyx_kp_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 0}, + {&__pyx_kp_s__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 0, 1, 0}, + {&__pyx_n_s_active, __pyx_k_active, sizeof(__pyx_k_active), 0, 0, 1, 1}, + {&__pyx_n_s_additional_info, __pyx_k_additional_info, sizeof(__pyx_k_additional_info), 0, 0, 1, 1}, + {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap, __pyx_k_bootstrap, sizeof(__pyx_k_bootstrap), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap_2, __pyx_k_bootstrap_2, sizeof(__pyx_k_bootstrap_2), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap_inner, __pyx_k_bootstrap_inner, sizeof(__pyx_k_bootstrap_inner), 0, 0, 1, 1}, + {&__pyx_n_s_bootstrap_inner_2, __pyx_k_bootstrap_inner_2, sizeof(__pyx_k_bootstrap_inner_2), 0, 0, 1, 1}, + {&__pyx_n_s_break_on_caught_exceptions, __pyx_k_break_on_caught_exceptions, sizeof(__pyx_k_break_on_caught_exceptions), 0, 0, 1, 1}, + {&__pyx_n_s_break_on_user_uncaught_exception, __pyx_k_break_on_user_uncaught_exception, sizeof(__pyx_k_break_on_user_uncaught_exception), 0, 0, 1, 1}, + {&__pyx_n_s_breakpoints, __pyx_k_breakpoints, sizeof(__pyx_k_breakpoints), 0, 0, 1, 1}, + {&__pyx_n_s_breakpoints_hit_at_lines, __pyx_k_breakpoints_hit_at_lines, sizeof(__pyx_k_breakpoints_hit_at_lines), 0, 0, 1, 1}, + {&__pyx_n_s_cache, __pyx_k_cache, sizeof(__pyx_k_cache), 0, 0, 1, 1}, + {&__pyx_n_s_call, __pyx_k_call, sizeof(__pyx_k_call), 0, 0, 1, 1}, + {&__pyx_n_s_call_2, __pyx_k_call_2, 
sizeof(__pyx_k_call_2), 0, 0, 1, 1}, + {&__pyx_n_s_can_skip, __pyx_k_can_skip, sizeof(__pyx_k_can_skip), 0, 0, 1, 1}, + {&__pyx_n_s_clear_thread_local_info, __pyx_k_clear_thread_local_info, sizeof(__pyx_k_clear_thread_local_info), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_code_line_info, __pyx_k_code_line_info, sizeof(__pyx_k_code_line_info), 0, 0, 1, 1}, + {&__pyx_n_s_code_obj, __pyx_k_code_obj, sizeof(__pyx_k_code_obj), 0, 0, 1, 1}, + {&__pyx_n_s_code_obj_py, __pyx_k_code_obj_py, sizeof(__pyx_k_code_obj_py), 0, 0, 1, 1}, + {&__pyx_n_s_compute_force_stay_in_untraced_m, __pyx_k_compute_force_stay_in_untraced_m, sizeof(__pyx_k_compute_force_stay_in_untraced_m), 0, 0, 1, 1}, + {&__pyx_n_s_current_thread, __pyx_k_current_thread, sizeof(__pyx_k_current_thread), 0, 0, 1, 1}, + {&__pyx_n_s_decref_py, __pyx_k_decref_py, sizeof(__pyx_k_decref_py), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dis, __pyx_k_dis, sizeof(__pyx_k_dis), 0, 0, 1, 1}, + {&__pyx_n_s_dummy_trace_dispatch, __pyx_k_dummy_trace_dispatch, sizeof(__pyx_k_dummy_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_dummy_tracing_holder, __pyx_k_dummy_tracing_holder, sizeof(__pyx_k_dummy_tracing_holder), 0, 0, 1, 1}, + {&__pyx_n_s_enter, __pyx_k_enter, sizeof(__pyx_k_enter), 0, 0, 1, 1}, + {&__pyx_n_s_event, __pyx_k_event, sizeof(__pyx_k_event), 0, 0, 1, 1}, + {&__pyx_n_s_exec, __pyx_k_exec, sizeof(__pyx_k_exec), 0, 0, 1, 1}, + {&__pyx_n_s_exit, __pyx_k_exit, sizeof(__pyx_k_exit), 0, 0, 1, 1}, + {&__pyx_n_s_f_back, __pyx_k_f_back, sizeof(__pyx_k_f_back), 0, 0, 1, 1}, + {&__pyx_n_s_f_trace, __pyx_k_f_trace, sizeof(__pyx_k_f_trace), 0, 0, 1, 1}, + {&__pyx_n_s_findlinestarts, __pyx_k_findlinestarts, sizeof(__pyx_k_findlinestarts), 0, 0, 1, 1}, + {&__pyx_n_s_first_line, __pyx_k_first_line, sizeof(__pyx_k_first_line), 0, 0, 1, 1}, + {&__pyx_n_s_fix_top_level_trace_and_get_trac, __pyx_k_fix_top_level_trace_and_get_trac, sizeof(__pyx_k_fix_top_level_trace_and_get_trac), 0, 0, 1, 1}, + {&__pyx_n_s_frame, __pyx_k_frame, sizeof(__pyx_k_frame), 0, 0, 1, 1}, + {&__pyx_n_s_frame_eval_func, __pyx_k_frame_eval_func, sizeof(__pyx_k_frame_eval_func), 0, 0, 1, 1}, + {&__pyx_n_s_function_breakpoint_name_to_brea, __pyx_k_function_breakpoint_name_to_brea, sizeof(__pyx_k_function_breakpoint_name_to_brea), 0, 0, 1, 1}, + {&__pyx_n_s_generate_code_with_breakpoints_p, __pyx_k_generate_code_with_breakpoints_p, sizeof(__pyx_k_generate_code_with_breakpoints_p), 0, 0, 1, 1}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, + {&__pyx_n_s_get_abs_path_real_path_and_base, __pyx_k_get_abs_path_real_path_and_base, sizeof(__pyx_k_get_abs_path_real_path_and_base), 0, 0, 1, 1}, + {&__pyx_n_s_get_cache_file_type, __pyx_k_get_cache_file_type, sizeof(__pyx_k_get_cache_file_type), 0, 0, 1, 1}, + {&__pyx_n_s_get_cached_code_obj_info_py, __pyx_k_get_cached_code_obj_info_py, sizeof(__pyx_k_get_cached_code_obj_info_py), 0, 0, 1, 1}, + {&__pyx_n_s_get_code_line_info, __pyx_k_get_code_line_info, sizeof(__pyx_k_get_code_line_info), 0, 0, 1, 1}, + {&__pyx_n_s_get_file_type, __pyx_k_get_file_type, sizeof(__pyx_k_get_file_type), 0, 0, 1, 1}, + {&__pyx_n_s_get_func_code_info_py, __pyx_k_get_func_code_info_py, sizeof(__pyx_k_get_func_code_info_py), 0, 0, 1, 1}, + {&__pyx_n_s_get_ident, __pyx_k_get_ident, sizeof(__pyx_k_get_ident), 0, 0, 1, 1}, + {&__pyx_n_s_get_ident_2, __pyx_k_get_ident_2, sizeof(__pyx_k_get_ident_2), 0, 0, 1, 1}, + 
{&__pyx_n_s_get_thread_info_py, __pyx_k_get_thread_info_py, sizeof(__pyx_k_get_thread_info_py), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_global_dbg, __pyx_k_global_dbg, sizeof(__pyx_k_global_dbg), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_exception_breaks, __pyx_k_has_plugin_exception_breaks, sizeof(__pyx_k_has_plugin_exception_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_line_breaks, __pyx_k_has_plugin_line_breaks, sizeof(__pyx_k_has_plugin_line_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_insert_pydevd_breaks, __pyx_k_insert_pydevd_breaks, sizeof(__pyx_k_insert_pydevd_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_intersection, __pyx_k_intersection, sizeof(__pyx_k_intersection), 0, 0, 1, 1}, + {&__pyx_n_s_is_pydev_daemon_thread, __pyx_k_is_pydev_daemon_thread, sizeof(__pyx_k_is_pydev_daemon_thread), 0, 0, 1, 1}, + {&__pyx_n_s_issuperset, __pyx_k_issuperset, sizeof(__pyx_k_issuperset), 0, 0, 1, 1}, + {&__pyx_n_s_last_line, __pyx_k_last_line, sizeof(__pyx_k_last_line), 0, 0, 1, 1}, + {&__pyx_n_s_line, __pyx_k_line, sizeof(__pyx_k_line), 0, 0, 1, 1}, + {&__pyx_n_s_line_to_offset, __pyx_k_line_to_offset, sizeof(__pyx_k_line_to_offset), 0, 0, 1, 1}, + {&__pyx_n_s_local, __pyx_k_local, sizeof(__pyx_k_local), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_main_2, __pyx_k_main_2, sizeof(__pyx_k_main_2), 0, 0, 1, 1}, + {&__pyx_n_s_max, __pyx_k_max, sizeof(__pyx_k_max), 0, 0, 1, 1}, + {&__pyx_n_s_min, __pyx_k_min, sizeof(__pyx_k_min), 0, 0, 1, 1}, + {&__pyx_n_s_mtime, __pyx_k_mtime, sizeof(__pyx_k_mtime), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, + {&__pyx_n_s_offset, __pyx_k_offset, sizeof(__pyx_k_offset), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_plugin, __pyx_k_plugin, sizeof(__pyx_k_plugin), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle__pydev_saved_modul, __pyx_k_pydev_bundle__pydev_saved_modul, sizeof(__pyx_k_pydev_bundle__pydev_saved_modul), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_monkey, __pyx_k_pydev_monkey, sizeof(__pyx_k_pydev_monkey), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd, __pyx_k_pydevd, sizeof(__pyx_k_pydevd), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_additional, __pyx_k_pydevd_bundle_pydevd_additional, sizeof(__pyx_k_pydevd_bundle_pydevd_additional), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_k_pydevd_bundle_pydevd_constants, sizeof(__pyx_k_pydevd_bundle_pydevd_constants), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_trace_disp, __pyx_k_pydevd_bundle_pydevd_trace_disp, sizeof(__pyx_k_pydevd_bundle_pydevd_trace_disp), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_file_utils, __pyx_k_pydevd_file_utils, sizeof(__pyx_k_pydevd_file_utils), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_frame_eval_pydevd_frame, __pyx_k_pydevd_frame_eval_pydevd_frame, sizeof(__pyx_k_pydevd_frame_eval_pydevd_frame), 0, 0, 1, 1}, + {&__pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_k_pydevd_frame_eval_pydevd_frame_2, sizeof(__pyx_k_pydevd_frame_eval_pydevd_frame_2), 0, 0, 1, 0}, + {&__pyx_n_s_pydevd_frame_eval_pydevd_frame_3, __pyx_k_pydevd_frame_eval_pydevd_frame_3, sizeof(__pyx_k_pydevd_frame_eval_pydevd_frame_3), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_frame_eval_pydevd_modify, __pyx_k_pydevd_frame_eval_pydevd_modify, 
sizeof(__pyx_k_pydevd_frame_eval_pydevd_modify), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_tracing, __pyx_k_pydevd_tracing, sizeof(__pyx_k_pydevd_tracing), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_FuncCodeInfo, __pyx_k_pyx_unpickle_FuncCodeInfo, sizeof(__pyx_k_pyx_unpickle_FuncCodeInfo), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_ThreadInfo, __pyx_k_pyx_unpickle_ThreadInfo, sizeof(__pyx_k_pyx_unpickle_ThreadInfo), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle__CacheValue, __pyx_k_pyx_unpickle__CacheValue, sizeof(__pyx_k_pyx_unpickle__CacheValue), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle__CodeLineInfo, __pyx_k_pyx_unpickle__CodeLineInfo, sizeof(__pyx_k_pyx_unpickle__CodeLineInfo), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_rfind, __pyx_k_rfind, sizeof(__pyx_k_rfind), 0, 0, 1, 1}, + {&__pyx_n_s_run, __pyx_k_run, sizeof(__pyx_k_run), 0, 0, 1, 1}, + {&__pyx_n_s_set_additional_thread_info_lock, __pyx_k_set_additional_thread_info_lock, sizeof(__pyx_k_set_additional_thread_info_lock), 0, 0, 1, 1}, + {&__pyx_n_s_set_trace_func, __pyx_k_set_trace_func, sizeof(__pyx_k_set_trace_func), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_show_return_values, __pyx_k_show_return_values, sizeof(__pyx_k_show_return_values), 0, 0, 1, 1}, + {&__pyx_n_s_signature_factory, __pyx_k_signature_factory, sizeof(__pyx_k_signature_factory), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_n_s_stop_frame_eval, __pyx_k_stop_frame_eval, sizeof(__pyx_k_stop_frame_eval), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_sys, __pyx_k_sys, sizeof(__pyx_k_sys), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_thread, __pyx_k_thread, sizeof(__pyx_k_thread), 0, 0, 1, 1}, + {&__pyx_n_s_thread_active, __pyx_k_thread_active, sizeof(__pyx_k_thread_active), 0, 0, 1, 1}, + {&__pyx_n_s_thread_info, __pyx_k_thread_info, sizeof(__pyx_k_thread_info), 0, 0, 1, 1}, + {&__pyx_n_s_thread_local_info, __pyx_k_thread_local_info, sizeof(__pyx_k_thread_local_info), 0, 0, 1, 1}, + {&__pyx_n_s_threading, __pyx_k_threading, sizeof(__pyx_k_threading), 0, 0, 1, 1}, + {&__pyx_n_s_trace_dispatch, __pyx_k_trace_dispatch, sizeof(__pyx_k_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_update_globals_dict, __pyx_k_update_globals_dict, sizeof(__pyx_k_update_globals_dict), 0, 0, 1, 1}, + {&__pyx_n_s_version_info, __pyx_k_version_info, sizeof(__pyx_k_version_info), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE 
int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 110, __pyx_L1_error) + __pyx_builtin_min = __Pyx_GetBuiltinName(__pyx_n_s_min); if (!__pyx_builtin_min) __PYX_ERR(0, 341, __pyx_L1_error) + __pyx_builtin_max = __Pyx_GetBuiltinName(__pyx_n_s_max); if (!__pyx_builtin_max) __PYX_ERR(0, 342, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":112 + * raise AttributeError() + * except: + * with _set_additional_thread_info_lock: # <<<<<<<<<<<<<< + * # If it's not there, set it within a lock to avoid any racing + * # conditions. + */ + __pyx_tuple__4 = PyTuple_Pack(3, Py_None, Py_None, Py_None); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 112, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x0af4089, 0xe535b68, 0xb8148ba): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x0af4089, 0xe535b68, 0xb8148ba) = (_can_create_dummy_thread, additional_info, force_stay_in_untraced_mode, fully_initialized, inside_frame_eval, is_pydevd_thread, thread_trace_func))" % __pyx_checksum) + */ + __pyx_tuple__6 = PyTuple_Pack(3, __pyx_int_11485321, __pyx_int_240343912, __pyx_int_193022138); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + __pyx_tuple__7 = PyTuple_Pack(3, __pyx_int_188670045, __pyx_int_72405718, __pyx_int_156687530); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + __pyx_tuple__8 = PyTuple_Pack(3, __pyx_int_66829570, __pyx_int_95010005, __pyx_int_2520179); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + __pyx_tuple__9 = PyTuple_Pack(3, __pyx_int_64258489, __pyx_int_180628038, __pyx_int_249558979); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":19 + * _thread_active = threading._active + * + * def clear_thread_local_info(): # <<<<<<<<<<<<<< + * global _thread_local_info + * _thread_local_info = threading.local() + */ + __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_clear_thread_local_info, 19, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 19, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":152 + * + * + * def dummy_trace_dispatch(frame, str event, arg): # <<<<<<<<<<<<<< + * if event == 'call': + * if frame.f_trace is not None: + */ + __pyx_tuple__11 = PyTuple_Pack(3, __pyx_n_s_frame, __pyx_n_s_event, __pyx_n_s_arg); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + __pyx_codeobj__12 = (PyObject*)__Pyx_PyCode_New(3, 0, 3, 0, 
CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__11, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_dummy_trace_dispatch, 152, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__12)) __PYX_ERR(0, 152, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":159 + * + * + * def get_thread_info_py() -> ThreadInfo: # <<<<<<<<<<<<<< + * return get_thread_info(PyEval_GetFrame()) + * + */ + __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_get_thread_info_py, 159, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 159, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":196 + * + * + * def decref_py(obj): # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. + */ + __pyx_tuple__14 = PyTuple_Pack(1, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__14); + __Pyx_GIVEREF(__pyx_tuple__14); + __pyx_codeobj__15 = (PyObject*)__Pyx_PyCode_New(1, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__14, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_decref_py, 196, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__15)) __PYX_ERR(0, 196, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":203 + * + * + * def get_func_code_info_py(thread_info, frame, code_obj) -> FuncCodeInfo: # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. + */ + __pyx_tuple__16 = PyTuple_Pack(3, __pyx_n_s_thread_info, __pyx_n_s_frame, __pyx_n_s_code_obj); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__16); + __Pyx_GIVEREF(__pyx_tuple__16); + __pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(3, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_get_func_code_info_py, 203, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__17)) __PYX_ERR(0, 203, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":329 + * + * # Note: this method has a version in pure-python too. 
+ * def _get_code_line_info(code_obj): # <<<<<<<<<<<<<< + * line_to_offset: dict = {} + * first_line: int = None + */ + __pyx_tuple__18 = PyTuple_Pack(6, __pyx_n_s_code_obj, __pyx_n_s_line_to_offset, __pyx_n_s_first_line, __pyx_n_s_last_line, __pyx_n_s_offset, __pyx_n_s_line); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__18); + __Pyx_GIVEREF(__pyx_tuple__18); + __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(1, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_get_code_line_info, 329, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 329, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":353 + * _cache: dict = {} + * + * def get_cached_code_obj_info_py(code_obj_py): # <<<<<<<<<<<<<< + * ''' + * :return _CacheValue: + */ + __pyx_tuple__20 = PyTuple_Pack(1, __pyx_n_s_code_obj_py); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 353, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__20); + __Pyx_GIVEREF(__pyx_tuple__20); + __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(1, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_get_cached_code_obj_info_py, 353, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 353, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":401 + * return breakpoint_found, force_stay_in_untraced_mode + * + * def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): # <<<<<<<<<<<<<< + * return generate_code_with_breakpoints(code_obj_py, breakpoints) + * + */ + __pyx_tuple__22 = PyTuple_Pack(2, __pyx_n_s_code_obj_py, __pyx_n_s_breakpoints); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__22); + __Pyx_GIVEREF(__pyx_tuple__22); + __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_generate_code_with_breakpoints_p, 401, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(0, 401, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":473 + * import sys + * + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) # <<<<<<<<<<<<<< + * + * def frame_eval_func(): + */ + __pyx_slice__24 = PySlice_New(Py_None, __pyx_int_2, Py_None); if (unlikely(!__pyx_slice__24)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_slice__24); + __Pyx_GIVEREF(__pyx_slice__24); + __pyx_tuple__25 = PyTuple_Pack(2, __pyx_int_3, __pyx_int_9); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__25); + __Pyx_GIVEREF(__pyx_tuple__25); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":475 + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: + */ + __pyx_tuple__26 = PyTuple_Pack(1, __pyx_n_s_state); if (unlikely(!__pyx_tuple__26)) __PYX_ERR(0, 475, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__26); + __Pyx_GIVEREF(__pyx_tuple__26); + __pyx_codeobj__27 = (PyObject*)__Pyx_PyCode_New(0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, 
__pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__26, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_frame_eval_func, 475, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__27)) __PYX_ERR(0, 475, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":484 + * + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + __pyx_tuple__28 = PyTuple_Pack(1, __pyx_n_s_state); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__28); + __Pyx_GIVEREF(__pyx_tuple__28); + __pyx_codeobj__29 = (PyObject*)__Pyx_PyCode_New(0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__28, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_frame_eval_pydevd_frame_2, __pyx_n_s_stop_frame_eval, 484, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__29)) __PYX_ERR(0, 484, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_ThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__30 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__30); + __Pyx_GIVEREF(__pyx_tuple__30); + __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_ThreadInfo, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__32 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__32)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__32); + __Pyx_GIVEREF(__pyx_tuple__32); + __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_FuncCodeInfo, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__34 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__34)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__34); + __Pyx_GIVEREF(__pyx_tuple__34); + __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__34, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle__CodeLineInfo, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__36 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__36)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__36); + __Pyx_GIVEREF(__pyx_tuple__36); + __pyx_codeobj__37 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, 
__pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__36, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle__CacheValue, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__37)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_3 = PyInt_FromLong(3); if (unlikely(!__pyx_int_3)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_9 = PyInt_FromLong(9); if (unlikely(!__pyx_int_9)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_2520179 = PyInt_FromLong(2520179L); if (unlikely(!__pyx_int_2520179)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_11485321 = PyInt_FromLong(11485321L); if (unlikely(!__pyx_int_11485321)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_64258489 = PyInt_FromLong(64258489L); if (unlikely(!__pyx_int_64258489)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_66829570 = PyInt_FromLong(66829570L); if (unlikely(!__pyx_int_66829570)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_72405718 = PyInt_FromLong(72405718L); if (unlikely(!__pyx_int_72405718)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_95010005 = PyInt_FromLong(95010005L); if (unlikely(!__pyx_int_95010005)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_156687530 = PyInt_FromLong(156687530L); if (unlikely(!__pyx_int_156687530)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_180628038 = PyInt_FromLong(180628038L); if (unlikely(!__pyx_int_180628038)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_188670045 = PyInt_FromLong(188670045L); if (unlikely(!__pyx_int_188670045)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_193022138 = PyInt_FromLong(193022138L); if (unlikely(!__pyx_int_193022138)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_240343912 = PyInt_FromLong(240343912L); if (unlikely(!__pyx_int_240343912)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_249558979 = PyInt_FromLong(249558979L); if (unlikely(!__pyx_int_249558979)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo = &__pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo; + __pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.initialize = (PyObject *(*)(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *, PyFrameObject *))__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_initialize; + __pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.initialize_if_possible = (PyObject *(*)(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo *))__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_10ThreadInfo_initialize_if_possible; + if (PyType_Ready(&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.tp_dictoffset && __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (__Pyx_SetVtable(__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo.tp_dict, __pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_ThreadInfo, (PyObject *)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo = &__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_ThreadInfo; + if (PyType_Ready(&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo) < 0) __PYX_ERR(0, 125, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo.tp_dictoffset && __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_FuncCodeInfo, (PyObject *)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo) < 0) __PYX_ERR(0, 125, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo) < 0) __PYX_ERR(0, 125, __pyx_L1_error) + __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo = &__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo; + if 
(PyType_Ready(&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo) < 0) __PYX_ERR(0, 316, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo.tp_dictoffset && __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_CodeLineInfo, (PyObject *)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo) < 0) __PYX_ERR(0, 316, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo) < 0) __PYX_ERR(0, 316, __pyx_L1_error) + __pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo = &__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo; + __pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue = &__pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; + __pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue.compute_force_stay_in_untraced_mode = (PyObject *(*)(struct __pyx_obj_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue *, PyObject *, int __pyx_skip_dispatch))__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue_compute_force_stay_in_untraced_mode; + if (PyType_Ready(&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue) < 0) __PYX_ERR(0, 361, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue.tp_dictoffset && __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #if CYTHON_UPDATE_DESCRIPTOR_DOC + { + PyObject *wrapper = PyObject_GetAttrString((PyObject *)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue, "__init__"); if (unlikely(!wrapper)) __PYX_ERR(0, 361, __pyx_L1_error) + if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) { + __pyx_wrapperbase_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__ = *((PyWrapperDescrObject *)wrapper)->d_base; + __pyx_wrapperbase_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__.doc = __pyx_doc_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__; + ((PyWrapperDescrObject *)wrapper)->d_base = &__pyx_wrapperbase_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_CacheValue___init__; + } + } + #endif + if (__Pyx_SetVtable(__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue.tp_dict, __pyx_vtabptr_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue) < 0) __PYX_ERR(0, 361, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_CacheValue, (PyObject *)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue) < 0) __PYX_ERR(0, 361, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue) < 0) __PYX_ERR(0, 361, __pyx_L1_error) + 
__pyx_ptype_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue = &__pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule("_pydevd_bundle.pydevd_cython"); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = __Pyx_ImportType(__pyx_t_1, "_pydevd_bundle.pydevd_cython", "PyDBAdditionalThreadInfo", sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) __PYX_ERR(2, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initpydevd_frame_evaluator(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initpydevd_frame_evaluator(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_pydevd_frame_evaluator(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_pydevd_frame_evaluator(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_pydevd_frame_evaluator(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module 'pydevd_frame_evaluator' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_pydevd_frame_evaluator(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("pydevd_frame_evaluator", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_b); + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_cython_runtime); + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main__pydevd_frame_eval__pydevd_frame_evaluator) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "_pydevd_frame_eval.pydevd_frame_evaluator")) { + if (unlikely(PyDict_SetItemString(modules, "_pydevd_frame_eval.pydevd_frame_evaluator", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":2 + * from __future__ import print_function + * from _pydev_bundle._pydev_saved_modules import threading, thread # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder + * import dis + */ + __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_threading); + __Pyx_GIVEREF(__pyx_n_s_threading); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_threading); + __Pyx_INCREF(__pyx_n_s_thread); + __Pyx_GIVEREF(__pyx_n_s_thread); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_thread); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydev_bundle__pydev_saved_modul, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_threading); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_threading, __pyx_t_1) < 0) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_thread); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_thread, __pyx_t_1) < 0) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":3 + * from __future__ import print_function + * from _pydev_bundle._pydev_saved_modules import threading, thread + * from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder # <<<<<<<<<<<<<< + * import dis + * import sys + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + 
__Pyx_INCREF(__pyx_n_s_GlobalDebuggerHolder); + __Pyx_GIVEREF(__pyx_n_s_GlobalDebuggerHolder); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_GlobalDebuggerHolder); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_GlobalDebuggerHolder); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_GlobalDebuggerHolder, __pyx_t_2) < 0) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":4 + * from _pydev_bundle._pydev_saved_modules import threading, thread + * from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder + * import dis # <<<<<<<<<<<<<< + * import sys + * from _pydevd_frame_eval.pydevd_frame_tracing import update_globals_dict, dummy_tracing_holder + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_dis, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_dis, __pyx_t_1) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":5 + * from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder + * import dis + * import sys # <<<<<<<<<<<<<< + * from _pydevd_frame_eval.pydevd_frame_tracing import update_globals_dict, dummy_tracing_holder + * from _pydevd_frame_eval.pydevd_modify_bytecode import DebugHelper, insert_pydevd_breaks + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_sys, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_sys, __pyx_t_1) < 0) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":6 + * import dis + * import sys + * from _pydevd_frame_eval.pydevd_frame_tracing import update_globals_dict, dummy_tracing_holder # <<<<<<<<<<<<<< + * from _pydevd_frame_eval.pydevd_modify_bytecode import DebugHelper, insert_pydevd_breaks + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + */ + __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_update_globals_dict); + __Pyx_GIVEREF(__pyx_n_s_update_globals_dict); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_update_globals_dict); + __Pyx_INCREF(__pyx_n_s_dummy_tracing_holder); + __Pyx_GIVEREF(__pyx_n_s_dummy_tracing_holder); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_dummy_tracing_holder); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_frame_eval_pydevd_frame, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_update_globals_dict, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_dummy_tracing_holder); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, 
__pyx_n_s_dummy_tracing_holder, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":7 + * import sys + * from _pydevd_frame_eval.pydevd_frame_tracing import update_globals_dict, dummy_tracing_holder + * from _pydevd_frame_eval.pydevd_modify_bytecode import DebugHelper, insert_pydevd_breaks # <<<<<<<<<<<<<< + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + * from _pydevd_bundle.pydevd_trace_dispatch import fix_top_level_trace_and_get_trace_func + */ + __pyx_t_2 = PyList_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_DebugHelper); + __Pyx_GIVEREF(__pyx_n_s_DebugHelper); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_DebugHelper); + __Pyx_INCREF(__pyx_n_s_insert_pydevd_breaks); + __Pyx_GIVEREF(__pyx_n_s_insert_pydevd_breaks); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_insert_pydevd_breaks); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_frame_eval_pydevd_modify, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_DebugHelper); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DebugHelper, __pyx_t_2) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_insert_pydevd_breaks); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_insert_pydevd_breaks, __pyx_t_2) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":8 + * from _pydevd_frame_eval.pydevd_frame_tracing import update_globals_dict, dummy_tracing_holder + * from _pydevd_frame_eval.pydevd_modify_bytecode import DebugHelper, insert_pydevd_breaks + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_trace_dispatch import fix_top_level_trace_and_get_trace_func + * + */ + __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_GIVEREF(__pyx_n_s_get_abs_path_real_path_and_base); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_INCREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __Pyx_GIVEREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_file_utils, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_abs_path_real_path_and_base, __pyx_t_1) < 0) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_t_1) < 0) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":9 + * from _pydevd_frame_eval.pydevd_modify_bytecode import DebugHelper, insert_pydevd_breaks + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + * from _pydevd_bundle.pydevd_trace_dispatch import fix_top_level_trace_and_get_trace_func # <<<<<<<<<<<<<< + * + * from _pydevd_bundle.pydevd_additional_thread_info import _set_additional_thread_info_lock + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_fix_top_level_trace_and_get_trac); + __Pyx_GIVEREF(__pyx_n_s_fix_top_level_trace_and_get_trac); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_fix_top_level_trace_and_get_trac); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_trace_disp, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_fix_top_level_trace_and_get_trac); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_fix_top_level_trace_and_get_trac, __pyx_t_2) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":11 + * from _pydevd_bundle.pydevd_trace_dispatch import fix_top_level_trace_and_get_trace_func + * + * from _pydevd_bundle.pydevd_additional_thread_info import _set_additional_thread_info_lock # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_cython cimport PyDBAdditionalThreadInfo + * from pydevd_tracing import SetTrace + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_set_additional_thread_info_lock); + __Pyx_GIVEREF(__pyx_n_s_set_additional_thread_info_lock); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_set_additional_thread_info_lock); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_additional, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_set_additional_thread_info_lock); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_set_additional_thread_info_lock, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":13 + * from _pydevd_bundle.pydevd_additional_thread_info import _set_additional_thread_info_lock + * from _pydevd_bundle.pydevd_cython cimport PyDBAdditionalThreadInfo + * from pydevd_tracing import SetTrace # <<<<<<<<<<<<<< + * + * _get_ident = threading.get_ident # Note this is py3 only, if py2 needed to be supported, _get_ident would be needed. 
+ */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_SetTrace); + __Pyx_GIVEREF(__pyx_n_s_SetTrace); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_SetTrace); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_tracing, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_SetTrace); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_SetTrace, __pyx_t_2) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":15 + * from pydevd_tracing import SetTrace + * + * _get_ident = threading.get_ident # Note this is py3 only, if py2 needed to be supported, _get_ident would be needed. # <<<<<<<<<<<<<< + * _thread_local_info = threading.local() + * _thread_active = threading._active + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_threading); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_get_ident_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_ident, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":16 + * + * _get_ident = threading.get_ident # Note this is py3 only, if py2 needed to be supported, _get_ident would be needed. + * _thread_local_info = threading.local() # <<<<<<<<<<<<<< + * _thread_active = threading._active + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_threading); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_local); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_thread_local_info, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":17 + * _get_ident = threading.get_ident # Note this is py3 only, if py2 needed to be supported, _get_ident would be needed. 
+ * _thread_local_info = threading.local() + * _thread_active = threading._active # <<<<<<<<<<<<<< + * + * def clear_thread_local_info(): + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_threading); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_active); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_thread_active, __pyx_t_1) < 0) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":19 + * _thread_active = threading._active + * + * def clear_thread_local_info(): # <<<<<<<<<<<<<< + * global _thread_local_info + * _thread_local_info = threading.local() + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_1clear_thread_local_info, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_clear_thread_local_info, __pyx_t_1) < 0) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":152 + * + * + * def dummy_trace_dispatch(frame, str event, arg): # <<<<<<<<<<<<<< + * if event == 'call': + * if frame.f_trace is not None: + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_3dummy_trace_dispatch, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_dummy_trace_dispatch, __pyx_t_1) < 0) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":159 + * + * + * def get_thread_info_py() -> ThreadInfo: # <<<<<<<<<<<<<< + * return get_thread_info(PyEval_GetFrame()) + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_5get_thread_info_py, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 159, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_thread_info_py, __pyx_t_1) < 0) __PYX_ERR(0, 159, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":196 + * + * + * def decref_py(obj): # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_7decref_py, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_decref_py, __pyx_t_1) < 0) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":203 + * + * + * def get_func_code_info_py(thread_info, frame, code_obj) -> FuncCodeInfo: # <<<<<<<<<<<<<< + * ''' + * Helper to be called from Python. 
+ */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_9get_func_code_info_py, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_func_code_info_py, __pyx_t_1) < 0) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":210 + * + * + * cdef int _code_extra_index = -1 # <<<<<<<<<<<<<< + * + * cdef FuncCodeInfo get_func_code_info(ThreadInfo thread_info, PyFrameObject * frame_obj, PyCodeObject * code_obj): + */ + __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator__code_extra_index = -1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":329 + * + * # Note: this method has a version in pure-python too. + * def _get_code_line_info(code_obj): # <<<<<<<<<<<<<< + * line_to_offset: dict = {} + * first_line: int = None + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_11_get_code_line_info, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_code_line_info, __pyx_t_1) < 0) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":351 + * # handled by the cython side in `FuncCodeInfo get_func_code_info` by providing the + * # same code info if the debugger mtime is still the same). + * _cache: dict = {} # <<<<<<<<<<<<<< + * + * def get_cached_code_obj_info_py(code_obj_py): + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 351, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_cache, __pyx_t_1) < 0) __PYX_ERR(0, 351, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":353 + * _cache: dict = {} + * + * def get_cached_code_obj_info_py(code_obj_py): # <<<<<<<<<<<<<< + * ''' + * :return _CacheValue: + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_13get_cached_code_obj_info_py, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 353, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_cached_code_obj_info_py, __pyx_t_1) < 0) __PYX_ERR(0, 353, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":401 + * return breakpoint_found, force_stay_in_untraced_mode + * + * def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): # <<<<<<<<<<<<<< + * return generate_code_with_breakpoints(code_obj_py, breakpoints) + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_15generate_code_with_breakpoints_py, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_generate_code_with_breakpoints_p, __pyx_t_1) < 0) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":471 + * return breakpoint_found, code_obj_py + * + * import sys # <<<<<<<<<<<<<< + * + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_sys, 0, -1); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 471, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_sys, __pyx_t_1) < 0) __PYX_ERR(0, 471, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":473 + * import sys + * + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) # <<<<<<<<<<<<<< + * + * def frame_eval_func(): + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_sys); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_version_info); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetSlice(__pyx_t_2, 0, 2, NULL, NULL, &__pyx_slice__24, 0, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyObject_RichCompare(__pyx_t_1, __pyx_tuple__25, Py_GE); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_18_pydevd_frame_eval_22pydevd_frame_evaluator_IS_PY_39_OWNARDS = __pyx_t_3; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":475 + * cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * if IS_PY_39_OWNARDS: + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_17frame_eval_func, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 475, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_frame_eval_func, __pyx_t_2) < 0) __PYX_ERR(0, 475, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":484 + * + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_19stop_frame_eval, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_stop_frame_eval, __pyx_t_2) < 0) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_ThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_21__pyx_unpickle_ThreadInfo, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_ThreadInfo, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle_ThreadInfo__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ThreadInfo__set_state(ThreadInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< 
+ * __pyx_result._can_create_dummy_thread = __pyx_state[0]; __pyx_result.additional_info = __pyx_state[1]; __pyx_result.force_stay_in_untraced_mode = __pyx_state[2]; __pyx_result.fully_initialized = __pyx_state[3]; __pyx_result.inside_frame_eval = __pyx_state[4]; __pyx_result.is_pydevd_thread = __pyx_state[5]; __pyx_result.thread_trace_func = __pyx_state[6] + * if len(__pyx_state) > 7 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_23__pyx_unpickle_FuncCodeInfo, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_FuncCodeInfo, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle__CodeLineInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_25__pyx_unpickle__CodeLineInfo, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle__CodeLineInfo, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle__CodeLineInfo__set_state(<_CodeLineInfo> __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle__CodeLineInfo__set_state(_CodeLineInfo __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.first_line = __pyx_state[0]; __pyx_result.last_line = __pyx_state[1]; __pyx_result.line_to_offset = __pyx_state[2] + * if len(__pyx_state) > 3 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_27__pyx_unpickle__CacheValue, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle__CacheValue, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":1 + * from __future__ import print_function # <<<<<<<<<<<<<< + * from _pydev_bundle._pydev_saved_modules import threading, thread + * from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init _pydevd_frame_eval.pydevd_frame_evaluator", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init _pydevd_frame_eval.pydevd_frame_evaluator"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallNoArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, 
__pyx_empty_tuple, NULL); +} +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (__Pyx_PyFastCFunction_Check(func)) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* PyIntBinop */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) { + (void)inplace; + (void)zerodivision_check; + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long x; + long a = PyInt_AS_LONG(op1); + x = (long)((unsigned long)a + b); + if (likely((x^a) >= 0 || (x^b) >= 0)) + 
return PyInt_FromLong(x); + return PyLong_Type.tp_as_number->nb_add(op1, op2); + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + const long b = intval; + long a, x; +#ifdef HAVE_LONG_LONG + const PY_LONG_LONG llb = intval; + PY_LONG_LONG lla, llx; +#endif + const digit* digits = ((PyLongObject*)op1)->ob_digit; + const Py_ssize_t size = Py_SIZE(op1); + if (likely(__Pyx_sst_abs(size) <= 1)) { + a = likely(size) ? digits[0] : 0; + if (size == -1) a = -a; + } else { + switch (size) { + case -2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case -3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case -4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + case 4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned 
PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + CYTHON_FALLTHROUGH; + default: return PyLong_Type.tp_as_number->nb_add(op1, op2); + } + } + x = a + b; + return PyLong_FromLong(x); +#ifdef HAVE_LONG_LONG + long_long: + llx = lla + llb; + return PyLong_FromLongLong(llx); +#endif + + + } + #endif + if (PyFloat_CheckExact(op1)) { + const long b = intval; + double a = PyFloat_AS_DOUBLE(op1); + double result; + PyFPE_START_PROTECT("add", return NULL) + result = ((double)a) + (double)b; + PyFPE_END_PROTECT(result) + return PyFloat_FromDouble(result); + } + return (inplace ? PyNumber_InPlaceAdd : PyNumber_Add)(op1, op2); +} +#endif + +/* SliceObject */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, + Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, + int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { +#if CYTHON_USE_TYPE_SLOTS + PyMappingMethods* mp; +#if PY_MAJOR_VERSION < 3 + PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; + if (likely(ms && ms->sq_slice)) { + if (!has_cstart) { + if (_py_start && (*_py_start != Py_None)) { + cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); + if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstart = 0; + } + if (!has_cstop) { + if (_py_stop && (*_py_stop != Py_None)) { + cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); + if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstop = PY_SSIZE_T_MAX; + } + if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { + Py_ssize_t l = ms->sq_length(obj); + if (likely(l >= 0)) { + if (cstop < 0) { + cstop += l; + if (cstop < 0) cstop = 0; + } + if (cstart < 0) { + cstart += l; + if (cstart < 0) cstart = 0; + } + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + goto bad; + PyErr_Clear(); + } + } + return ms->sq_slice(obj, cstart, cstop); + } +#endif + mp = Py_TYPE(obj)->tp_as_mapping; + if (likely(mp && mp->mp_subscript)) +#endif + { + PyObject* result; + PyObject *py_slice, *py_start, *py_stop; + if (_py_slice) { + py_slice = *_py_slice; + } else { + PyObject* owned_start = NULL; + PyObject* owned_stop = NULL; + if (_py_start) { + py_start = *_py_start; + } else { + if (has_cstart) { + owned_start = py_start = PyInt_FromSsize_t(cstart); + if (unlikely(!py_start)) goto bad; + } else + py_start = Py_None; + } + if (_py_stop) { + py_stop = *_py_stop; + } else { + if (has_cstop) { + owned_stop = py_stop = PyInt_FromSsize_t(cstop); + if (unlikely(!py_stop)) { + Py_XDECREF(owned_start); + goto bad; + } + } else + py_stop = Py_None; + } + py_slice = PySlice_New(py_start, py_stop, Py_None); + Py_XDECREF(owned_start); + Py_XDECREF(owned_stop); + if (unlikely(!py_slice)) goto bad; + } +#if CYTHON_USE_TYPE_SLOTS + result = mp->mp_subscript(obj, py_slice); +#else + result = PyObject_GetItem(obj, py_slice); +#endif + if (!_py_slice) { + Py_DECREF(py_slice); + } + return result; + } + PyErr_Format(PyExc_TypeError, + "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); +bad: + return NULL; +} + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = 
PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + return (likely(r)) ?
r : __Pyx_GetAttr3Default(d); +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, 
*tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = 
tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* PyObjectSetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_setattro)) + return tp->tp_setattro(obj, attr_name, value); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_setattr)) + return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); +#endif + return PyObject_SetAttr(obj, attr_name, value); +} +#endif + +/* None */ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) { + PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname); +} + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* SwapException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int 
boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* RaiseTooManyValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* IterFinish */ +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == 
&PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (descr != NULL) { + *method = descr; + return 0; + } + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(name)); +#endif + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* RaiseNoneIterError */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/* UnpackTupleError */ +static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { + if (t == Py_None) { + __Pyx_RaiseNoneNotIterableError(); + } else if (PyTuple_GET_SIZE(t) < index) { + __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); + } else { + __Pyx_RaiseTooManyValuesError(index); + } +} + +/* UnpackTuple2 */ +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { + PyObject *value1 = NULL, *value2 = NULL; +#if CYTHON_COMPILING_IN_PYPY + value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; + value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; +#else + value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); + value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); +#endif + if (decref_tuple) { + Py_DECREF(tuple); + } + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +#if CYTHON_COMPILING_IN_PYPY +bad: + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +#endif +} +static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, + int has_known_size, int decref_tuple) { + Py_ssize_t index; + PyObject 
*value1 = NULL, *value2 = NULL, *iter = NULL; + iternextfunc iternext; + iter = PyObject_GetIter(tuple); + if (unlikely(!iter)) goto bad; + if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } + iternext = Py_TYPE(iter)->tp_iternext; + value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } + value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } + if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; + Py_DECREF(iter); + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +unpacking_failed: + if (!has_known_size && __Pyx_IterFinish() == 0) + __Pyx_RaiseNeedMoreValuesError(index); +bad: + Py_XDECREF(iter); + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +} + +/* dict_iter */ +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_source_is_dict) { + is_dict = is_dict || likely(PyDict_CheckExact(iterable)); + *p_source_is_dict = is_dict; + if (is_dict) { +#if !CYTHON_COMPILING_IN_PYPY + *p_orig_length = PyDict_Size(iterable); + Py_INCREF(iterable); + return iterable; +#elif PY_MAJOR_VERSION >= 3 + static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; + PyObject **pp = NULL; + if (method_name) { + const char *name = PyUnicode_AsUTF8(method_name); + if (strcmp(name, "iteritems") == 0) pp = &py_items; + else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; + else if (strcmp(name, "itervalues") == 0) pp = &py_values; + if (pp) { + if (!*pp) { + *pp = PyUnicode_FromString(name + 4); + if (!*pp) + return NULL; + } + method_name = *pp; + } + } +#endif + } + *p_orig_length = 0; + if (method_name) { + PyObject* iter; + iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); + if (!iterable) + return NULL; +#if !CYTHON_COMPILING_IN_PYPY + if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) + return iterable; +#endif + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + return iter; + } + return PyObject_GetIter(iterable); +} +static CYTHON_INLINE int __Pyx_dict_iter_next( + PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { + PyObject* next_item; +#if !CYTHON_COMPILING_IN_PYPY + if (source_is_dict) { + PyObject *key, *value; + if (unlikely(orig_length != PyDict_Size(iter_obj))) { + PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); + return -1; + } + if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { + return 0; + } + if (pitem) { + PyObject* tuple = PyTuple_New(2); + if (unlikely(!tuple)) { + return -1; + } + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(tuple, 0, key); + PyTuple_SET_ITEM(tuple, 1, value); + *pitem = tuple; + } else { + if (pkey) { + Py_INCREF(key); + *pkey = key; + } + if (pvalue) { + Py_INCREF(value); + *pvalue = value; + } + } + return 1; + } else if (PyTuple_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyTuple_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else if (PyList_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyList_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else +#endif + { + next_item = PyIter_Next(iter_obj); + 
if (unlikely(!next_item)) { + return __Pyx_IterFinish(); + } + } + if (pitem) { + *pitem = next_item; + } else if (pkey && pvalue) { + if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) + return -1; + } else if (pkey) { + *pkey = next_item; + } else { + *pvalue = next_item; + } + return 1; +} + +/* WriteUnraisableException */ +static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, + CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, + int full_traceback, CYTHON_UNUSED int nogil) { + PyObject *old_exc, *old_val, *old_tb; + PyObject *ctx; + __Pyx_PyThreadState_declare +#ifdef WITH_THREAD + PyGILState_STATE state; + if (nogil) + state = PyGILState_Ensure(); +#ifdef _MSC_VER + else state = (PyGILState_STATE)-1; +#endif +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); + if (full_traceback) { + Py_XINCREF(old_exc); + Py_XINCREF(old_val); + Py_XINCREF(old_tb); + __Pyx_ErrRestore(old_exc, old_val, old_tb); + PyErr_PrintEx(1); + } + #if PY_MAJOR_VERSION < 3 + ctx = PyString_FromString(name); + #else + ctx = PyUnicode_FromString(name); + #endif + __Pyx_ErrRestore(old_exc, old_val, old_tb); + if (!ctx) { + PyErr_WriteUnraisable(Py_None); + } else { + PyErr_WriteUnraisable(ctx); + Py_DECREF(ctx); + } +#ifdef WITH_THREAD + if (nogil) + PyGILState_Release(state); +#endif +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = 
__Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetVTable */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* PyObjectGetAttrStrNoError */ +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +} + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_getstate = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject 
*reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + PyObject *getstate = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); +#else + getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); + if (!getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (getstate) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); +#else + object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); + if (!object_getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (object_getstate != getstate) { + goto __PYX_GOOD; + } + } +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); + Py_XDECREF(object_getstate); + Py_XDECREF(getstate); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum 
__Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if 
(!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) 
(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) 
((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * 
sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * 
sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); 
+#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i '9'); + break; + } + if (rt_from_call[i] != ctversion[i]) { + same = 0; + break; + } + } + if (!same) { + char rtversion[5] = {'\0'}; + char message[200]; + for (i=0; i<4; ++i) { + if (rt_from_call[i] == '.') { + if (found_dot) break; + found_dot = 1; + } else if (rt_from_call[i] < '0' || rt_from_call[i] > '9') { + break; + } + rtversion[i] = rt_from_call[i]; + } + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* InitStrings */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + 
PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd new file mode 100644 index 0000000..d8fb5f2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd @@ -0,0 +1,131 @@ +from cpython.mem cimport PyMem_Malloc, PyMem_Free + +cdef extern from *: + ctypedef void PyObject + ctypedef struct PyCodeObject: + int co_argcount; # arguments, except *args */ + int co_kwonlyargcount; # keyword only arguments */ + int co_nlocals; # local variables */ + int co_stacksize; # entries needed for evaluation stack */ + int co_flags; # CO_..., see below */ + int co_firstlineno; # first source line number */ + PyObject *co_code; # instruction opcodes */ + PyObject *co_consts; # list (constants used) */ + PyObject *co_names; # list of strings (names used) */ + PyObject *co_varnames; # tuple of strings (local variable names) */ + PyObject *co_freevars; # tuple of strings (free variable names) */ + PyObject *co_cellvars; # tuple of strings (cell variable names) */ + unsigned char *co_cell2arg; # Maps cell vars which are arguments. 
*/ + PyObject *co_filename; # unicode (where it was loaded from) */ + PyObject *co_name; # unicode (name, for reference) */ + PyObject *co_lnotab; # string (encoding addr<->lineno mapping) See + # Objects/lnotab_notes.txt for details. */ + void *co_zombieframe; # for optimization only (see frameobject.c) */ + PyObject *co_weakreflist; # to support weakrefs to code objects */ + void *co_extra; + +cdef extern from "frameobject.h": + ctypedef struct PyFrameObject: + PyFrameObject *f_back + PyCodeObject *f_code # code segment + PyObject *f_builtins # builtin symbol table (PyDictObject) + PyObject *f_globals # global symbol table (PyDictObject) */ + PyObject *f_locals # local symbol table (any mapping) */ + PyObject **f_valuestack # + PyObject **f_stacktop + PyObject *f_trace # Trace function */ + PyObject *f_exc_type + PyObject *f_exc_value + PyObject *f_exc_traceback + PyObject *f_gen; + + int f_lasti; #/* Last instruction if called */ + int f_lineno; #/* Current line number */ + int f_iblock; #/* index in f_blockstack */ + char f_executing; #/* whether the frame is still executing */ + PyObject *f_localsplus[1]; + +cdef extern from "release_mem.h": + void release_co_extra(void *) + +cdef extern from "code.h": + ctypedef void freefunc(void *) + int _PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra) + int _PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra) + +# TODO: Things are in a different place for Python 3.11. +# cdef extern from "cpython/code.h": +# ctypedef void freefunc(void *) +# int _PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra) +# int _PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra) + +cdef extern from "Python.h": + void Py_INCREF(object o) + void Py_DECREF(object o) + object PyImport_ImportModule(char *name) + PyObject* PyObject_CallFunction(PyObject *callable, const char *format, ...) + object PyObject_GetAttrString(object o, char *attr_name) + +cdef extern from "pystate.h": + # ctypedef PyObject* _PyFrameEvalFunction(PyThreadState* tstate, PyFrameObject *frame, int exc) + # ctypedef PyObject* _PyFrameEvalFunction(PyFrameObject *frame, int exc) + ctypedef PyObject* _PyFrameEvalFunction(...) + + ctypedef struct PyInterpreterState: + PyInterpreterState *next + PyInterpreterState *tstate_head + + PyObject *modules + + PyObject *modules_by_index + PyObject *sysdict + PyObject *builtins + PyObject *importlib + + PyObject *codec_search_path + PyObject *codec_search_cache + PyObject *codec_error_registry + int codecs_initialized + int fscodec_initialized + + int dlopenflags + + PyObject *builtins_copy + PyObject *import_func + # Initialized to PyEval_EvalFrameDefault(). + _PyFrameEvalFunction eval_frame + + ctypedef struct PyThreadState: + PyThreadState *prev + PyThreadState *next + PyInterpreterState *interp + # ... + + PyThreadState *PyThreadState_Get() + +cdef extern from "ceval.h": + ''' +#if PY_VERSION_HEX >= 0x03090000 +PyObject * noop(PyFrameObject *frame, int exc) { + return NULL; +} +#define CALL_EvalFrameDefault_38(a, b) noop(a, b) +#define CALL_EvalFrameDefault_39(a, b, c) _PyEval_EvalFrameDefault(a, b, c) +#else +PyObject * noop(PyThreadState* tstate, PyFrameObject *frame, int exc) { + return NULL; +} +#define CALL_EvalFrameDefault_39(a, b, c) noop(a, b, c) +#define CALL_EvalFrameDefault_38(a, b) _PyEval_EvalFrameDefault(a, b) +#endif + ''' + + int _PyEval_RequestCodeExtraIndex(freefunc) + PyFrameObject *PyEval_GetFrame() + PyObject* PyEval_CallFunction(PyObject *callable, const char *format, ...) 
+ + # PyObject* _PyEval_EvalFrameDefault(PyThreadState* tstate, PyFrameObject *frame, int exc) + # PyObject* _PyEval_EvalFrameDefault(PyFrameObject *frame, int exc) + PyObject* _PyEval_EvalFrameDefault(...) + PyObject* CALL_EvalFrameDefault_38(PyFrameObject *frame, int exc) # Actually a macro. + PyObject* CALL_EvalFrameDefault_39(PyThreadState* tstate, PyFrameObject *frame, int exc) # Actually a macro. diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.template.pyx b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.template.pyx new file mode 100644 index 0000000..1bbf9f8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.template.pyx @@ -0,0 +1,613 @@ +from __future__ import print_function +from _pydev_bundle._pydev_saved_modules import threading, thread +from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder +import dis +import sys +from _pydevd_frame_eval.pydevd_frame_tracing import update_globals_dict, dummy_tracing_holder +from _pydevd_frame_eval.pydevd_modify_bytecode import DebugHelper, insert_pydevd_breaks +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER +from _pydevd_bundle.pydevd_trace_dispatch import fix_top_level_trace_and_get_trace_func + +from _pydevd_bundle.pydevd_additional_thread_info import _set_additional_thread_info_lock +from _pydevd_bundle.pydevd_cython cimport PyDBAdditionalThreadInfo +from pydevd_tracing import SetTrace + +_get_ident = threading.get_ident # Note this is py3 only, if py2 needed to be supported, _get_ident would be needed. +_thread_local_info = threading.local() +_thread_active = threading._active + +def clear_thread_local_info(): + global _thread_local_info + _thread_local_info = threading.local() + + +cdef class ThreadInfo: + + cdef public PyDBAdditionalThreadInfo additional_info + cdef public bint is_pydevd_thread + cdef public int inside_frame_eval + cdef public bint fully_initialized + cdef public object thread_trace_func + cdef bint _can_create_dummy_thread + + # Note: whenever get_func_code_info is called, this value is reset (we're using + # it as a thread-local value info). + # If True the debugger should not go into trace mode even if the new + # code for a function is None and there are breakpoints. + cdef public bint force_stay_in_untraced_mode + + cdef initialize(self, PyFrameObject * frame_obj): + # Places that create a ThreadInfo should verify that + # a current Python frame is being executed! + assert frame_obj != NULL + + self.additional_info = None + self.is_pydevd_thread = False + self.inside_frame_eval = 0 + self.fully_initialized = False + self.thread_trace_func = None + + # Get the root (if it's not a Thread initialized from the threading + # module, create the dummy thread entry so that we can debug it -- + # otherwise, we have to wait for the threading module itself to + # create the Thread entry). + while frame_obj.f_back != NULL: + frame_obj = frame_obj.f_back + + basename = frame_obj.f_code.co_filename + i = basename.rfind('/') + j = basename.rfind('\\') + if j > i: + i = j + if i >= 0: + basename = basename[i + 1:] + # remove ext + i = basename.rfind('.') + if i >= 0: + basename = basename[:i] + + co_name = frame_obj.f_code.co_name + + # In these cases we cannot create a dummy thread (an actual + # thread will be created later or tracing will already be set). 
+ if basename == 'threading' and co_name in ('__bootstrap', '_bootstrap', '__bootstrap_inner', '_bootstrap_inner'): + self._can_create_dummy_thread = False + elif basename == 'pydev_monkey' and co_name == '__call__': + self._can_create_dummy_thread = False + elif basename == 'pydevd' and co_name in ('run', 'main', '_exec'): + self._can_create_dummy_thread = False + elif basename == 'pydevd_tracing': + self._can_create_dummy_thread = False + else: + self._can_create_dummy_thread = True + + # print('Can create dummy thread for thread started in: %s %s' % (basename, co_name)) + + cdef initialize_if_possible(self): + # Don't call threading.currentThread because if we're too early in the process + # we may create a dummy thread. + self.inside_frame_eval += 1 + + try: + thread_ident = _get_ident() + t = _thread_active.get(thread_ident) + if t is None: + if self._can_create_dummy_thread: + # Initialize the dummy thread and set the tracing (both are needed to + # actually stop on breakpoints). + t = threading.current_thread() + SetTrace(dummy_trace_dispatch) + else: + return # Cannot initialize until thread becomes active. + + if getattr(t, 'is_pydev_daemon_thread', False): + self.is_pydevd_thread = True + self.fully_initialized = True + else: + try: + additional_info = t.additional_info + if additional_info is None: + raise AttributeError() + except: + with _set_additional_thread_info_lock: + # If it's not there, set it within a lock to avoid any racing + # conditions. + additional_info = getattr(thread, 'additional_info', None) + if additional_info is None: + additional_info = PyDBAdditionalThreadInfo() + t.additional_info = additional_info + self.additional_info = additional_info + self.fully_initialized = True + finally: + self.inside_frame_eval -= 1 + + +cdef class FuncCodeInfo: + + cdef public str co_filename + cdef public str co_name + cdef public str canonical_normalized_filename + cdef bint always_skip_code + cdef public bint breakpoint_found + cdef public object new_code + + # When breakpoints_mtime != PyDb.mtime the validity of breakpoints have + # to be re-evaluated (if invalid a new FuncCodeInfo must be created and + # tracing can't be disabled for the related frames). + cdef public int breakpoints_mtime + + def __init__(self): + self.co_filename = '' + self.canonical_normalized_filename = '' + self.always_skip_code = False + + # If breakpoints are found but new_code is None, + # this means we weren't able to actually add the code + # where needed, so, fallback to tracing. + self.breakpoint_found = False + self.new_code = None + self.breakpoints_mtime = -1 + + +def dummy_trace_dispatch(frame, str event, arg): + if event == 'call': + if frame.f_trace is not None: + return frame.f_trace(frame, event, arg) + return None + + +def get_thread_info_py() -> ThreadInfo: + return get_thread_info(PyEval_GetFrame()) + + +cdef ThreadInfo get_thread_info(PyFrameObject * frame_obj): + ''' + Provides thread-related info. + + May return None if the thread is still not active. + ''' + cdef ThreadInfo thread_info + try: + # Note: changing to a `dict[thread.ident] = thread_info` had almost no + # effect in the performance. + thread_info = _thread_local_info.thread_info + except: + if frame_obj == NULL: + return None + thread_info = ThreadInfo() + thread_info.initialize(frame_obj) + thread_info.inside_frame_eval += 1 + try: + _thread_local_info.thread_info = thread_info + + # Note: _code_extra_index is not actually thread-related, + # but this is a good point to initialize it. 
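+        # _PyEval_RequestCodeExtraIndex (PEP 523 co_extra support) reserves a per-code-object
+        # slot; the returned index is what _PyCode_GetExtra/_PyCode_SetExtra use further down to
+        # cache a FuncCodeInfo directly on each code object, with release_co_extra as the
+        # destructor for that cached value.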
+ global _code_extra_index + if _code_extra_index == -1: + _code_extra_index = _PyEval_RequestCodeExtraIndex(release_co_extra) + + thread_info.initialize_if_possible() + finally: + thread_info.inside_frame_eval -= 1 + + return thread_info + + +def decref_py(obj): + ''' + Helper to be called from Python. + ''' + Py_DECREF(obj) + + +def get_func_code_info_py(thread_info, frame, code_obj) -> FuncCodeInfo: + ''' + Helper to be called from Python. + ''' + return get_func_code_info( thread_info, frame, code_obj) + + +cdef int _code_extra_index = -1 + +cdef FuncCodeInfo get_func_code_info(ThreadInfo thread_info, PyFrameObject * frame_obj, PyCodeObject * code_obj): + ''' + Provides code-object related info. + + Stores the gathered info in a cache in the code object itself. Note that + multiple threads can get the same info. + + get_thread_info() *must* be called at least once before get_func_code_info() + to initialize _code_extra_index. + + ''' + # f_code = code_obj + # DEBUG = f_code.co_filename.endswith('_debugger_case_multiprocessing.py') + # if DEBUG: + # print('get_func_code_info', f_code.co_name, f_code.co_filename) + + cdef object main_debugger = GlobalDebuggerHolder.global_dbg + thread_info.force_stay_in_untraced_mode = False # This is an output value of the function. + + cdef PyObject * extra + _PyCode_GetExtra( code_obj, _code_extra_index, & extra) + if extra is not NULL: + extra_obj = extra + if extra_obj is not NULL: + func_code_info_obj = extra_obj + if func_code_info_obj.breakpoints_mtime == main_debugger.mtime: + # if DEBUG: + # print('get_func_code_info: matched mtime', f_code.co_name, f_code.co_filename) + + return func_code_info_obj + + cdef str co_filename = code_obj.co_filename + cdef str co_name = code_obj.co_name + cdef dict cache_file_type + cdef tuple cache_file_type_key + + func_code_info = FuncCodeInfo() + func_code_info.breakpoints_mtime = main_debugger.mtime + + func_code_info.co_filename = co_filename + func_code_info.co_name = co_name + + if not func_code_info.always_skip_code: + try: + abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[co_filename] + except: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame_obj) + + func_code_info.canonical_normalized_filename = abs_path_real_path_and_base[1] + + cache_file_type = main_debugger.get_cache_file_type() + # Note: this cache key must be the same from PyDB.get_file_type() -- see it for comments + # on the cache. 
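Note: get_func_code_info() caches the computed FuncCodeInfo directly on the code object through the PEP 523 co_extra slot and treats the cached value as stale once the debugger-wide breakpoint mtime changes. The C-level _PyCode_GetExtra/_PyCode_SetExtra calls cannot be reproduced in pure Python, but the invalidation scheme itself is simple; a dict-based sketch (all names below are invented for the example):

_info_cache = {}

def get_info(code_obj, current_mtime, compute):
    # Reuse the cached entry only while the debugger's breakpoint "mtime"
    # has not moved on; otherwise recompute and store a fresh entry.
    cached = _info_cache.get(code_obj)
    if cached is not None and cached['mtime'] == current_mtime:
        return cached
    cached = {'mtime': current_mtime, 'value': compute(code_obj)}
    _info_cache[code_obj] = cached
    return cached

info = get_info(get_info.__code__, 1, lambda c: c.co_name)
assert get_info(get_info.__code__, 1, lambda c: c.co_name) is info      # same mtime: cache hit
assert get_info(get_info.__code__, 2, lambda c: c.co_name) is not info  # bumped mtime: recomputed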
+ cache_file_type_key = (frame_obj.f_code.co_firstlineno, abs_path_real_path_and_base[0], frame_obj.f_code) + try: + file_type = cache_file_type[cache_file_type_key] # Make it faster + except: + file_type = main_debugger.get_file_type(frame_obj, abs_path_real_path_and_base) # we don't want to debug anything related to pydevd + + if file_type is not None: + func_code_info.always_skip_code = True + + if not func_code_info.always_skip_code: + if main_debugger is not None: + + breakpoints: dict = main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename) + function_breakpoint: object = main_debugger.function_breakpoint_name_to_breakpoint.get(func_code_info.co_name) + # print('\n---') + # print(main_debugger.breakpoints) + # print(func_code_info.canonical_normalized_filename) + # print(main_debugger.breakpoints.get(func_code_info.canonical_normalized_filename)) + code_obj_py: object = code_obj + cached_code_obj_info: object = _cache.get(code_obj_py) + if cached_code_obj_info: + # The cache is for new code objects, so, in this case it's already + # using the new code and we can't change it as this is a generator! + # There's still a catch though: even though we don't replace the code, + # we may not want to go into tracing mode (as would usually happen + # when the new_code is None). + func_code_info.new_code = None + breakpoint_found, thread_info.force_stay_in_untraced_mode = \ + cached_code_obj_info.compute_force_stay_in_untraced_mode(breakpoints) + func_code_info.breakpoint_found = breakpoint_found + + elif function_breakpoint: + # Go directly into tracing mode + func_code_info.breakpoint_found = True + func_code_info.new_code = None + + elif breakpoints: + # if DEBUG: + # print('found breakpoints', code_obj_py.co_name, breakpoints) + + # Note: new_code can be None if unable to generate. + # It should automatically put the new code object in the cache. + breakpoint_found, func_code_info.new_code = generate_code_with_breakpoints(code_obj_py, breakpoints) + func_code_info.breakpoint_found = breakpoint_found + + Py_INCREF(func_code_info) + _PyCode_SetExtra( code_obj, _code_extra_index, func_code_info) + + return func_code_info + + +cdef class _CodeLineInfo: + + cdef public dict line_to_offset + cdef public int first_line + cdef public int last_line + + def __init__(self, dict line_to_offset, int first_line, int last_line): + self.line_to_offset = line_to_offset + self.first_line = first_line + self.last_line = last_line + + +# Note: this method has a version in pure-python too. +def _get_code_line_info(code_obj): + line_to_offset: dict = {} + first_line: int = None + last_line: int = None + + cdef int offset + cdef int line + + for offset, line in dis.findlinestarts(code_obj): + line_to_offset[line] = offset + + if line_to_offset: + first_line = min(line_to_offset) + last_line = max(line_to_offset) + return _CodeLineInfo(line_to_offset, first_line, last_line) + + +# Note: this is a cache where the key is the code objects we create ourselves so that +# we always return the same code object for generators. +# (so, we don't have a cache from the old code to the new info -- that's actually +# handled by the cython side in `FuncCodeInfo get_func_code_info` by providing the +# same code info if the debugger mtime is still the same). +_cache: dict = {} + +def get_cached_code_obj_info_py(code_obj_py): + ''' + :return _CacheValue: + :note: on cython use _cache.get(code_obj_py) directly. 
+ ''' + return _cache.get(code_obj_py) + + +cdef class _CacheValue(object): + + cdef public object code_obj_py + cdef public _CodeLineInfo code_line_info + cdef public set breakpoints_hit_at_lines + cdef public set code_lines_as_set + + def __init__(self, object code_obj_py, _CodeLineInfo code_line_info, set breakpoints_hit_at_lines): + ''' + :param code_obj_py: + :param _CodeLineInfo code_line_info: + :param set[int] breakpoints_hit_at_lines: + ''' + self.code_obj_py = code_obj_py + self.code_line_info = code_line_info + self.breakpoints_hit_at_lines = breakpoints_hit_at_lines + self.code_lines_as_set = set(code_line_info.line_to_offset) + + cpdef compute_force_stay_in_untraced_mode(self, breakpoints): + ''' + :param breakpoints: + set(breakpoint_lines) or dict(breakpoint_line->breakpoint info) + :return tuple(breakpoint_found, force_stay_in_untraced_mode) + ''' + cdef bint force_stay_in_untraced_mode + cdef bint breakpoint_found + cdef set target_breakpoints + + force_stay_in_untraced_mode = False + + target_breakpoints = self.code_lines_as_set.intersection(breakpoints) + breakpoint_found = bool(target_breakpoints) + + if not breakpoint_found: + force_stay_in_untraced_mode = True + else: + force_stay_in_untraced_mode = self.breakpoints_hit_at_lines.issuperset(set(breakpoints)) + + return breakpoint_found, force_stay_in_untraced_mode + +def generate_code_with_breakpoints_py(object code_obj_py, dict breakpoints): + return generate_code_with_breakpoints(code_obj_py, breakpoints) + +# DEBUG = True +# debug_helper = DebugHelper() + +cdef generate_code_with_breakpoints(object code_obj_py, dict breakpoints): + ''' + :param breakpoints: + dict where the keys are the breakpoint lines. + :return tuple(breakpoint_found, new_code) + ''' + # The cache is needed for generator functions, because after each yield a new frame + # is created but the former code object is used (so, check if code_to_modify is + # already there and if not cache based on the new code generated). + + cdef bint success + cdef int breakpoint_line + cdef bint breakpoint_found + cdef _CacheValue cache_value + cdef set breakpoints_hit_at_lines + cdef dict line_to_offset + + assert code_obj_py not in _cache, 'If a code object is cached, that same code object must be reused.' 
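Note: compute_force_stay_in_untraced_mode() above boils down to set arithmetic: a breakpoint counts as found if any requested line exists in this code object, and the frame may stay in untraced mode either when nothing matches or when every requested line is already covered by an injected break. The same decision in plain Python, as a standalone sketch:

def force_stay_in_untraced_mode(code_lines, breaks_inserted_at, requested_breakpoints):
    # code_lines: lines that actually exist in the code object
    # breaks_inserted_at: lines where programmatic breaks were already injected
    # requested_breakpoints: lines the user currently has breakpoints on
    hit = code_lines & set(requested_breakpoints)
    breakpoint_found = bool(hit)
    if not breakpoint_found:
        return breakpoint_found, True
    # Only stay untraced if every requested line is already covered by an injected break.
    return breakpoint_found, set(requested_breakpoints) <= breaks_inserted_at

print(force_stay_in_untraced_mode({1, 2, 3}, {2}, {2}))     # (True, True)
print(force_stay_in_untraced_mode({1, 2, 3}, {2}, {2, 3}))  # (True, False)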
+ +# if DEBUG: +# initial_code_obj_py = code_obj_py + + code_line_info = _get_code_line_info(code_obj_py) + + success = True + + breakpoints_hit_at_lines = set() + line_to_offset = code_line_info.line_to_offset + + for breakpoint_line in breakpoints: + if breakpoint_line in line_to_offset: + breakpoints_hit_at_lines.add(breakpoint_line) + + if breakpoints_hit_at_lines: + success, new_code = insert_pydevd_breaks( + code_obj_py, + breakpoints_hit_at_lines, + code_line_info + ) + + if not success: + code_obj_py = None + else: + code_obj_py = new_code + + breakpoint_found = bool(breakpoints_hit_at_lines) + if breakpoint_found and success: +# if DEBUG: +# op_number = debug_helper.write_dis( +# 'inserting code, breaks at: %s' % (list(breakpoints),), +# initial_code_obj_py +# ) +# +# debug_helper.write_dis( +# 'after inserting code, breaks at: %s' % (list(breakpoints,)), +# code_obj_py, +# op_number=op_number, +# ) + + cache_value = _CacheValue(code_obj_py, code_line_info, breakpoints_hit_at_lines) + _cache[code_obj_py] = cache_value + + return breakpoint_found, code_obj_py + +import sys + +cdef bint IS_PY_39_OWNARDS = sys.version_info[:2] >= (3, 9) + +def frame_eval_func(): + cdef PyThreadState *state = PyThreadState_Get() + if IS_PY_39_OWNARDS: + state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_39 + else: + state.interp.eval_frame = <_PyFrameEvalFunction *> get_bytecode_while_frame_eval_38 + dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + + +def stop_frame_eval(): + cdef PyThreadState *state = PyThreadState_Get() + state.interp.eval_frame = _PyEval_EvalFrameDefault + +# During the build we'll generate 2 versions of the code below so that we're compatible with +# Python 3.9, which receives a "PyThreadState* tstate" as the first parameter and Python 3.6-3.8 +# which doesn't. +### TEMPLATE_START +cdef PyObject * get_bytecode_while_frame_eval(PyFrameObject * frame_obj, int exc): + ''' + This function makes the actual evaluation and changes the bytecode to a version + where programmatic breakpoints are added. + ''' + if GlobalDebuggerHolder is None or _thread_local_info is None or exc: + # Sometimes during process shutdown these global variables become None + return CALL_EvalFrameDefault + + # co_filename: str = frame_obj.f_code.co_filename + # if co_filename.endswith('threading.py'): + # return CALL_EvalFrameDefault + + cdef ThreadInfo thread_info + cdef int STATE_SUSPEND = 2 + cdef int CMD_STEP_INTO = 107 + cdef int CMD_STEP_OVER = 108 + cdef int CMD_STEP_OVER_MY_CODE = 159 + cdef int CMD_STEP_INTO_MY_CODE = 144 + cdef int CMD_STEP_INTO_COROUTINE = 206 + cdef int CMD_SMART_STEP_INTO = 128 + cdef bint can_skip = True + try: + thread_info = _thread_local_info.thread_info + except: + thread_info = get_thread_info(frame_obj) + if thread_info is None: + return CALL_EvalFrameDefault + + if thread_info.inside_frame_eval: + return CALL_EvalFrameDefault + + if not thread_info.fully_initialized: + thread_info.initialize_if_possible() + if not thread_info.fully_initialized: + return CALL_EvalFrameDefault + + # Can only get additional_info when fully initialized. + cdef PyDBAdditionalThreadInfo additional_info = thread_info.additional_info + if thread_info.is_pydevd_thread or additional_info.is_tracing: + # Make sure that we don't trace pydevd threads or inside our own calls. 
+ return CALL_EvalFrameDefault + + # frame = frame_obj + # DEBUG = frame.f_code.co_filename.endswith('_debugger_case_tracing.py') + # if DEBUG: + # print('get_bytecode_while_frame_eval', frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename) + + thread_info.inside_frame_eval += 1 + additional_info.is_tracing = True + try: + main_debugger: object = GlobalDebuggerHolder.global_dbg + if main_debugger is None: + return CALL_EvalFrameDefault + frame = frame_obj + + if thread_info.thread_trace_func is None: + trace_func, apply_to_global = fix_top_level_trace_and_get_trace_func(main_debugger, frame) + if apply_to_global: + thread_info.thread_trace_func = trace_func + + if additional_info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_INTO_COROUTINE, CMD_SMART_STEP_INTO) or \ + main_debugger.break_on_caught_exceptions or \ + main_debugger.break_on_user_uncaught_exceptions or \ + main_debugger.has_plugin_exception_breaks or \ + main_debugger.signature_factory or \ + additional_info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and main_debugger.show_return_values and frame.f_back is additional_info.pydev_step_stop: + + # if DEBUG: + # print('get_bytecode_while_frame_eval enabled trace') + if thread_info.thread_trace_func is not None: + frame.f_trace = thread_info.thread_trace_func + else: + frame.f_trace = main_debugger.trace_dispatch + else: + func_code_info: FuncCodeInfo = get_func_code_info(thread_info, frame_obj, frame_obj.f_code) + # if DEBUG: + # print('get_bytecode_while_frame_eval always skip', func_code_info.always_skip_code) + if not func_code_info.always_skip_code: + + if main_debugger.has_plugin_line_breaks or main_debugger.has_plugin_exception_breaks: + can_skip = main_debugger.plugin.can_skip(main_debugger, frame_obj) + + if not can_skip: + # if DEBUG: + # print('get_bytecode_while_frame_eval not can_skip') + if thread_info.thread_trace_func is not None: + frame.f_trace = thread_info.thread_trace_func + else: + frame.f_trace = main_debugger.trace_dispatch + + if can_skip and func_code_info.breakpoint_found: + # if DEBUG: + # print('get_bytecode_while_frame_eval new_code', func_code_info.new_code) + if not thread_info.force_stay_in_untraced_mode: + # If breakpoints are found but new_code is None, + # this means we weren't able to actually add the code + # where needed, so, fallback to tracing. + if func_code_info.new_code is None: + if thread_info.thread_trace_func is not None: + frame.f_trace = thread_info.thread_trace_func + else: + frame.f_trace = main_debugger.trace_dispatch + else: + # print('Using frame eval break for', frame_obj.f_code.co_name) + update_globals_dict( frame_obj.f_globals) + Py_INCREF(func_code_info.new_code) + old = frame_obj.f_code + frame_obj.f_code = func_code_info.new_code + Py_DECREF(old) + else: + # When we're forcing to stay in traced mode we need to + # update the globals dict (because this means that we're reusing + # a previous code which had breakpoints added in a new frame). 
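Note: whenever the evaluator above cannot (or does not want to) patch the code object, it falls back to ordinary tracing by assigning frame.f_trace. Outside of the frame-eval machinery, that per-frame tracing is what sys.settrace provides; a minimal, self-contained illustration of the fallback mechanism (function and variable names here are arbitrary):

import sys

def _local_trace(frame, event, arg):
    # Called for events inside the traced frame; printing the line number is a
    # stand-in for pydevd's breakpoint checks.
    if event == 'line':
        print('line', frame.f_lineno)
    return _local_trace

def _global_trace(frame, event, arg):
    # Only trace calls into target(); every other frame stays untraced.
    if event == 'call' and frame.f_code.co_name == 'target':
        return _local_trace
    return None

def target():
    x = 1
    y = x + 1
    return y

sys.settrace(_global_trace)
target()
sys.settrace(None)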
+ update_globals_dict( frame_obj.f_globals) + + finally: + thread_info.inside_frame_eval -= 1 + additional_info.is_tracing = False + + return CALL_EvalFrameDefault +### TEMPLATE_END diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py new file mode 100644 index 0000000..7b34cd5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py @@ -0,0 +1,122 @@ +import sys + +from _pydev_bundle import pydev_log +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_comm import get_global_debugger +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info + + +class DummyTracingHolder: + dummy_trace_func = None + + def set_trace_func(self, trace_func): + self.dummy_trace_func = trace_func + + +dummy_tracing_holder = DummyTracingHolder() + + +def update_globals_dict(globals_dict): + new_globals = {'_pydev_stop_at_break': _pydev_stop_at_break} + globals_dict.update(new_globals) + + +def _get_line_for_frame(frame): + # it's absolutely necessary to reset tracing function for frame in order to get the real line number + tracing_func = frame.f_trace + frame.f_trace = None + line = frame.f_lineno + frame.f_trace = tracing_func + return line + + +def _pydev_stop_at_break(line): + frame = sys._getframe(1) + # print('pydevd SET TRACING at ', line, 'curr line', frame.f_lineno) + t = threading.current_thread() + try: + additional_info = t.additional_info + except: + additional_info = set_additional_thread_info(t) + + if additional_info.is_tracing: + return + + additional_info.is_tracing += 1 + try: + py_db = get_global_debugger() + if py_db is None: + return + + pydev_log.debug("Setting f_trace due to frame eval mode in file: %s on line %s", frame.f_code.co_filename, line) + additional_info.trace_suspend_type = 'frame_eval' + + pydevd_frame_eval_cython_wrapper = sys.modules['_pydevd_frame_eval.pydevd_frame_eval_cython_wrapper'] + thread_info = pydevd_frame_eval_cython_wrapper.get_thread_info_py() + if thread_info.thread_trace_func is not None: + frame.f_trace = thread_info.thread_trace_func + else: + frame.f_trace = py_db.get_thread_local_trace_func() + finally: + additional_info.is_tracing -= 1 + + +def _pydev_needs_stop_at_break(line): + ''' + We separate the functionality into 2 functions so that we can generate a bytecode which + generates a spurious line change so that we can do: + + if _pydev_needs_stop_at_break(): + # Set line to line -1 + _pydev_stop_at_break() + # then, proceed to go to the current line + # (which will then trigger a line event). + ''' + t = threading.current_thread() + try: + additional_info = t.additional_info + except: + additional_info = set_additional_thread_info(t) + + if additional_info.is_tracing: + return False + + additional_info.is_tracing += 1 + try: + frame = sys._getframe(1) + # print('pydev needs stop at break?', line, 'curr line', frame.f_lineno, 'curr trace', frame.f_trace) + if frame.f_trace is not None: + # i.e.: this frame is already being traced, thus, we don't need to use programmatic breakpoints. 
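Note: both helpers above (_pydev_stop_at_break and _pydev_needs_stop_at_break) inspect their caller with sys._getframe(1), because they are invoked from instructions injected into the user's own frame and need that frame's file and line. A tiny standalone illustration of that caller inspection:

import sys

def where_was_i_called_from():
    caller = sys._getframe(1)  # the frame that invoked this helper
    return caller.f_code.co_filename, caller.f_lineno

def user_code():
    return where_was_i_called_from()

print(user_code())  # prints this file's name and the line number of the call above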
+ return False + + py_db = get_global_debugger() + if py_db is None: + return False + + try: + abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + canonical_normalized_filename = abs_path_real_path_and_base[1] + + try: + python_breakpoint = py_db.breakpoints[canonical_normalized_filename][line] + except: + # print("Couldn't find breakpoint in the file %s on line %s" % (frame.f_code.co_filename, line)) + # Could be KeyError if line is not there or TypeError if breakpoints_for_file is None. + # Note: using catch-all exception for performance reasons (if the user adds a breakpoint + # and then removes it after hitting it once, this method added for the programmatic + # breakpoint will keep on being called and one of those exceptions will always be raised + # here). + return False + + if python_breakpoint: + # print('YES') + return True + + finally: + additional_info.is_tracing -= 1 + + return False + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py new file mode 100644 index 0000000..7e76358 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py @@ -0,0 +1,365 @@ +from collections import namedtuple +import dis +from functools import partial +import itertools +import os.path +import sys + +from _pydevd_frame_eval.vendored import bytecode +from _pydevd_frame_eval.vendored.bytecode.instr import Instr, Label +from _pydev_bundle import pydev_log +from _pydevd_frame_eval.pydevd_frame_tracing import _pydev_stop_at_break, _pydev_needs_stop_at_break + +DEBUG = False + + +class DebugHelper(object): + + def __init__(self): + self._debug_dir = os.path.join(os.path.dirname(__file__), 'debug_info') + try: + os.makedirs(self._debug_dir) + except: + pass + self._next = partial(next, itertools.count(0)) + + def _get_filename(self, op_number=None, prefix=''): + if op_number is None: + op_number = self._next() + name = '%03d_before.txt' % op_number + else: + name = '%03d_change.txt' % op_number + + filename = os.path.join(self._debug_dir, prefix + name) + return filename, op_number + + def write_bytecode(self, b, op_number=None, prefix=''): + filename, op_number = self._get_filename(op_number, prefix) + with open(filename, 'w') as stream: + bytecode.dump_bytecode(b, stream=stream, lineno=True) + return op_number + + def write_dis(self, code_to_modify, op_number=None, prefix=''): + filename, op_number = self._get_filename(op_number, prefix) + with open(filename, 'w') as stream: + stream.write('-------- ') + stream.write('-------- ') + stream.write('id(code_to_modify): %s' % id(code_to_modify)) + stream.write('\n\n') + dis.dis(code_to_modify, file=stream) + return op_number + + +_CodeLineInfo = namedtuple('_CodeLineInfo', 'line_to_offset, first_line, last_line') + + +# Note: this method has a version in cython too (that one is usually used, this is just for tests). 
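Note: the pure-Python _get_code_line_info() defined next builds its line_to_offset mapping from dis.findlinestarts(), which yields (bytecode offset, source line) pairs for every line that starts an instruction. A quick standalone look at what that produces for a small function:

import dis

def sample(a, b):
    c = a + b
    return c * 2

line_to_offset = {line: offset for offset, line in dis.findlinestarts(sample.__code__)}
print(line_to_offset)                            # maps each source line of sample() to its first bytecode offset
print(min(line_to_offset), max(line_to_offset))  # first_line and last_line, as in _CodeLineInfo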
+def _get_code_line_info(code_obj): + line_to_offset = {} + first_line = None + last_line = None + + for offset, line in dis.findlinestarts(code_obj): + line_to_offset[line] = offset + + if line_to_offset: + first_line = min(line_to_offset) + last_line = max(line_to_offset) + return _CodeLineInfo(line_to_offset, first_line, last_line) + + +if DEBUG: + debug_helper = DebugHelper() + + +def get_instructions_to_add( + stop_at_line, + _pydev_stop_at_break=_pydev_stop_at_break, + _pydev_needs_stop_at_break=_pydev_needs_stop_at_break + ): + ''' + This is the bytecode for something as: + + if _pydev_needs_stop_at_break(): + _pydev_stop_at_break() + + but with some special handling for lines. + ''' + # Good reference to how things work regarding line numbers and jumps: + # https://github.com/python/cpython/blob/3.6/Objects/lnotab_notes.txt + + # Usually use a stop line -1, but if that'd be 0, using line +1 is ok too. + spurious_line = stop_at_line - 1 + if spurious_line <= 0: + spurious_line = stop_at_line + 1 + + label = Label() + return [ + # -- if _pydev_needs_stop_at_break(): + Instr("LOAD_CONST", _pydev_needs_stop_at_break, lineno=stop_at_line), + Instr("LOAD_CONST", stop_at_line, lineno=stop_at_line), + Instr("CALL_FUNCTION", 1, lineno=stop_at_line), + Instr("POP_JUMP_IF_FALSE", label, lineno=stop_at_line), + + # -- _pydev_stop_at_break() + # + # Note that this has line numbers -1 so that when the NOP just below + # is executed we have a spurious line event. + Instr("LOAD_CONST", _pydev_stop_at_break, lineno=spurious_line), + Instr("LOAD_CONST", stop_at_line, lineno=spurious_line), + Instr("CALL_FUNCTION", 1, lineno=spurious_line), + Instr("POP_TOP", lineno=spurious_line), + + # Reason for the NOP: Python will give us a 'line' trace event whenever we forward jump to + # the first instruction of a line, so, in the case where we haven't added a programmatic + # breakpoint (either because we didn't hit a breakpoint anymore or because it was already + # tracing), we don't want the spurious line event due to the line change, so, we make a jump + # to the instruction right after the NOP so that the spurious line event is NOT generated in + # this case (otherwise we'd have a line event even if the line didn't change). + Instr("NOP", lineno=stop_at_line), + label, + ] + + +class _Node(object): + + def __init__(self, data): + self.prev = None + self.next = None + self.data = data + + def append(self, data): + node = _Node(data) + + curr_next = self.next + + node.next = self.next + node.prev = self + self.next = node + + if curr_next is not None: + curr_next.prev = node + + return node + + def prepend(self, data): + node = _Node(data) + + curr_prev = self.prev + + node.prev = self.prev + node.next = self + self.prev = node + + if curr_prev is not None: + curr_prev.next = node + + return node + + +class _HelperBytecodeList(object): + ''' + A helper double-linked list to make the manipulation a bit easier (so that we don't need + to keep track of indices that change) and performant (because adding multiple items to + the middle of a regular list isn't ideal). + ''' + + def __init__(self, lst=None): + self._head = None + self._tail = None + if lst: + node = self + for item in lst: + node = node.append(item) + + def append(self, data): + if self._tail is None: + node = _Node(data) + self._head = self._tail = node + return node + else: + node = self._tail = self.tail.append(data) + return node + + @property + def head(self): + node = self._head + # Manipulating the node directly may make it unsynchronized. 
+ while node.prev: + self._head = node = node.prev + return node + + @property + def tail(self): + node = self._tail + # Manipulating the node directly may make it unsynchronized. + while node.next: + self._tail = node = node.next + return node + + def __iter__(self): + node = self.head + + while node: + yield node.data + node = node.next + + +_PREDICT_TABLE = { + 'LIST_APPEND': ('JUMP_ABSOLUTE',), + 'SET_ADD': ('JUMP_ABSOLUTE',), + 'GET_ANEXT': ('LOAD_CONST',), + 'GET_AWAITABLE': ('LOAD_CONST',), + 'DICT_MERGE': ('CALL_FUNCTION_EX',), + 'MAP_ADD': ('JUMP_ABSOLUTE',), + 'COMPARE_OP': ('POP_JUMP_IF_FALSE', 'POP_JUMP_IF_TRUE',), + 'IS_OP': ('POP_JUMP_IF_FALSE', 'POP_JUMP_IF_TRUE',), + 'CONTAINS_OP': ('POP_JUMP_IF_FALSE', 'POP_JUMP_IF_TRUE',), + + # Note: there are some others with PREDICT on ceval, but they have more logic + # and it needs more experimentation to know how it behaves in the static generated + # code (and it's only an issue for us if there's actually a line change between + # those, so, we don't have to really handle all the cases, only the one where + # the line number actually changes from one instruction to the predicted one). +} + +# 3.10 optimizations include copying code branches multiple times (for instance +# if the body of a finally has a single assign statement it can copy the assign to the case +# where an exception happens and doesn't happen for optimization purposes) and as such +# we need to add the programmatic breakpoint multiple times. +TRACK_MULTIPLE_BRANCHES = sys.version_info[:2] >= (3, 10) + +# When tracking multiple branches, we try to fix the bytecodes which would be PREDICTED in the +# Python eval loop so that we don't have spurious line events that wouldn't usually be issued +# in the tracing as they're ignored due to the eval prediction (even though they're in the bytecode). +FIX_PREDICT = sys.version_info[:2] >= (3, 10) + + +def insert_pydevd_breaks( + code_to_modify, + breakpoint_lines, + code_line_info=None, + _pydev_stop_at_break=_pydev_stop_at_break, + _pydev_needs_stop_at_break=_pydev_needs_stop_at_break, + ): + """ + Inserts pydevd programmatic breaks into the code (at the given lines). + + :param breakpoint_lines: set with the lines where we should add breakpoints. + :return: tuple(boolean flag whether insertion was successful, modified code). + """ + if code_line_info is None: + code_line_info = _get_code_line_info(code_to_modify) + + if not code_line_info.line_to_offset: + return False, code_to_modify + + # Create a copy (and make sure we're dealing with a set). + breakpoint_lines = set(breakpoint_lines) + + # Note that we can even generate breakpoints on the first line of code + # now, since we generate a spurious line event -- it may be a bit pointless + # as we'll stop in the first line and we don't currently stop the tracing after the + # user resumes, but in the future, if we do that, this would be a nice + # improvement. + # if code_to_modify.co_firstlineno in breakpoint_lines: + # return False, code_to_modify + + for line in breakpoint_lines: + if line <= 0: + # The first line is line 1, so, a break at line 0 is not valid. 
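Note: the instruction walk below splices in, at each line that carries a breakpoint, the sequence produced by get_instructions_to_add(), which behaves like the following source-level guard (the two helpers are stubbed here; the real ones live in pydevd_frame_tracing, and the deliberately off-by-one line number on the second call is what produces the spurious 'line' event described above):

def _pydev_needs_stop_at_break(line):  # stub standing in for the pydevd helper
    return True

def _pydev_stop_at_break(line):        # stub standing in for the pydevd helper
    print('would suspend at line', line)

STOP_AT_LINE = 10  # hypothetical breakpoint line

# The injected bytecode is roughly this guard, inlined at the breakpoint line:
if _pydev_needs_stop_at_break(STOP_AT_LINE):
    _pydev_stop_at_break(STOP_AT_LINE)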
+ pydev_log.info('Trying to add breakpoint in invalid line: %s', line) + return False, code_to_modify + + try: + b = bytecode.Bytecode.from_code(code_to_modify) + + if DEBUG: + op_number_bytecode = debug_helper.write_bytecode(b, prefix='bytecode.') + + helper_list = _HelperBytecodeList(b) + + modified_breakpoint_lines = breakpoint_lines.copy() + + curr_node = helper_list.head + added_breaks_in_lines = set() + last_lineno = None + while curr_node is not None: + instruction = curr_node.data + instruction_lineno = getattr(instruction, 'lineno', None) + curr_name = getattr(instruction, 'name', None) + + if FIX_PREDICT: + predict_targets = _PREDICT_TABLE.get(curr_name) + if predict_targets: + # Odd case: the next instruction may have a line number but it doesn't really + # appear in the tracing due to the PREDICT() in ceval, so, fix the bytecode so + # that it does things the way that ceval actually interprets it. + # See: https://mail.python.org/archives/list/python-dev@python.org/thread/CP2PTFCMTK57KM3M3DLJNWGO66R5RVPB/ + next_instruction = curr_node.next.data + next_name = getattr(next_instruction, 'name', None) + if next_name in predict_targets: + next_instruction_lineno = getattr(next_instruction, 'lineno', None) + if next_instruction_lineno: + next_instruction.lineno = None + + if instruction_lineno is not None: + if TRACK_MULTIPLE_BRANCHES: + if last_lineno is None: + last_lineno = instruction_lineno + else: + if last_lineno == instruction_lineno: + # If the previous is a label, someone may jump into it, so, we need to add + # the break even if it's in the same line. + if curr_node.prev.data.__class__ != Label: + # Skip adding this as the line is still the same. + curr_node = curr_node.next + continue + last_lineno = instruction_lineno + else: + if instruction_lineno in added_breaks_in_lines: + curr_node = curr_node.next + continue + + if instruction_lineno in modified_breakpoint_lines: + added_breaks_in_lines.add(instruction_lineno) + if curr_node.prev is not None and curr_node.prev.data.__class__ == Label \ + and curr_name == 'POP_TOP': + + # If we have a SETUP_FINALLY where the target is a POP_TOP, we can't change + # the target to be the breakpoint instruction (this can crash the interpreter). 
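Note: taken as a whole, insert_pydevd_breaks() is a Bytecode.from_code(...) / to_code() round trip around the vendored bytecode library, with the instruction splicing happening in between; the frame evaluator then swaps the resulting code object into the live frame. For ordinary use of that library outside pydevd the same round trip can be applied to a function's __code__; a minimal sketch, assuming the standalone bytecode package is installed (import path 'bytecode' rather than the vendored one):

from bytecode import Bytecode

def greet():
    return 'hello'

b = Bytecode.from_code(greet.__code__)
# pydevd would splice its breakpoint instructions into `b` here; as a harmless
# stand-in this sketch rebuilds the code object unchanged.
greet.__code__ = b.to_code()
print(greet())  # still 'hello'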
+ + for new_instruction in get_instructions_to_add( + instruction_lineno, + _pydev_stop_at_break=_pydev_stop_at_break, + _pydev_needs_stop_at_break=_pydev_needs_stop_at_break, + ): + curr_node = curr_node.append(new_instruction) + + else: + for new_instruction in get_instructions_to_add( + instruction_lineno, + _pydev_stop_at_break=_pydev_stop_at_break, + _pydev_needs_stop_at_break=_pydev_needs_stop_at_break, + ): + curr_node.prepend(new_instruction) + + curr_node = curr_node.next + + b[:] = helper_list + + if DEBUG: + debug_helper.write_bytecode(b, op_number_bytecode, prefix='bytecode.') + + new_code = b.to_code() + + except: + pydev_log.exception('Error inserting pydevd breaks.') + return False, code_to_modify + + if DEBUG: + op_number = debug_helper.write_dis(code_to_modify) + debug_helper.write_dis(new_code, op_number) + + return True, new_code + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/release_mem.h b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/release_mem.h new file mode 120000 index 0000000..8176b68 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/release_mem.h @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/31/b3/02/34941791c259f1453bc681f8e0cc2a1374115995f9582033ef30a3bba6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/README.txt b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/README.txt new file mode 120000 index 0000000..be58b95 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/README.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8e/04/8f/b0c3b719cf2770d51ee51c1dc5d541f981f2022a1630f5c7703e8a6d01 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..1d73f2e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/pydevd_fix_code.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/pydevd_fix_code.cpython-38.pyc new file mode 100644 index 0000000..f06f671 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__pycache__/pydevd_fix_code.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/COPYING 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/COPYING new file mode 120000 index 0000000..29328bc --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/COPYING @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6d/a5/a4/9be4ded8b2d44702bb4cbd333a431256354253f7b3bce6c7068dbdf139 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/INSTALLER new file mode 120000 index 0000000..d8b8e5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/INSTALLER @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ce/eb/ae/7b8927a3227e5303cf5e0f1f7b34bb542ad7250ac03fbcde36ec2f1508 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/METADATA b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/METADATA new file mode 120000 index 0000000..91f59db --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f5/76/9d/0cae984d0f853e8c182390d2e22780ee14469d13ff7cce59f62a0f9044 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/RECORD b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/RECORD new file mode 120000 index 0000000..c03f368 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/RECORD @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/da/e7/47/4eda60417ba4ccb6a3ecc55fac98416b8d2157b4a33fcbf2679bcda8c5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/REQUESTED new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/REQUESTED @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/WHEEL new file mode 120000 index 0000000..5c55a1c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3a/a4/64/174798e461ecb0ca2b16395b4c8ab4ef6be91e917ad1f21003a952f710 \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/direct_url.json new file mode 120000 index 0000000..56336b0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/direct_url.json @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b3/9f/11/6f829746530ac64fa6ce9bebfed25143e1f1f3e0c7b14e8d7688429c08 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/top_level.txt new file mode 120000 index 0000000..da34f9f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f4/18/5d/07b1ea619f8fbc7368597ea29702d8590a9c80438bc1d6f76979b91b2b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__init__.py new file mode 120000 index 0000000..fe26821 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f6/10/a9/94100657664d6c8e93924902f9979f93f4b16deb005707b6897b5d48f4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3d036f8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/bytecode.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/bytecode.cpython-38.pyc new file mode 100644 index 0000000..8e0cc9a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/bytecode.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/cfg.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/cfg.cpython-38.pyc new file mode 100644 index 0000000..785ffbc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/cfg.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/concrete.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/concrete.cpython-38.pyc new file mode 100644 index 0000000..7ace182 Binary files /dev/null 
and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/concrete.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/flags.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/flags.cpython-38.pyc new file mode 100644 index 0000000..a6fe0be Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/flags.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/instr.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/instr.cpython-38.pyc new file mode 100644 index 0000000..e100bd6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/instr.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/peephole_opt.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/peephole_opt.cpython-38.pyc new file mode 100644 index 0000000..eae8a13 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__pycache__/peephole_opt.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/bytecode.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/bytecode.py new file mode 120000 index 0000000..90f295b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/bytecode.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/50/00/82/3e0d02b8cf5123334c24baaf2e50b378c04d235519f9602ca733bbad03 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/cfg.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/cfg.py new file mode 120000 index 0000000..99d1c60 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/cfg.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fe/78/2d/77a2d3745ef843b78cd24bfe8d8ef463ed66d9d60e55374bdcb0018ba5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/concrete.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/concrete.py new file mode 120000 index 0000000..323111b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/concrete.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d0/2a/04/679b3154ae812f2b49d4a236a998126fdf9400c917c1ad4688f682dda8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/flags.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/flags.py new file mode 120000 index 0000000..006d980 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/flags.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8a/94/2a/950baf1e7ff3c235a468f95fffe2cf4e339371acaeb774feb008a36105 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/instr.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/instr.py new file mode 120000 index 0000000..e997ecf --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/instr.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/9c/e0/73a7492d695a3b1a66884ae6477aa858afa96eca854390e7661631a7dc \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/peephole_opt.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/peephole_opt.py new file mode 120000 index 0000000..62056b1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/peephole_opt.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3b/ba/b5/f6ad2c0e2c0de3356414676671e4e128ae1b4183ffd770c521b1fca3c3 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__init__.py new file mode 120000 index 0000000..1015f5b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/aa/5a/49/ee78af39a8c4b4dee622383fc298903c18239e4372a55cb529d6e6944c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..17fa0aa Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_bytecode.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_bytecode.cpython-38.pyc new file mode 100644 index 0000000..606bc73 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_bytecode.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_cfg.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_cfg.cpython-38.pyc new file mode 100644 index 0000000..d563da9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_cfg.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_code.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_code.cpython-38.pyc new file mode 100644 index 0000000..ee019e2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_code.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_concrete.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_concrete.cpython-38.pyc new file mode 100644 index 0000000..3a31e93 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_concrete.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_flags.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_flags.cpython-38.pyc new file mode 100644 index 0000000..6f7cec0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_flags.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_instr.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_instr.cpython-38.pyc new file mode 100644 index 0000000..d94d4a8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_instr.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_misc.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_misc.cpython-38.pyc new file mode 100644 index 0000000..63749be Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_misc.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_peephole_opt.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_peephole_opt.cpython-38.pyc new file mode 100644 index 0000000..0ec5d61 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/test_peephole_opt.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/util_annotation.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/util_annotation.cpython-38.pyc new file mode 100644 index 0000000..ef7a1b3 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__pycache__/util_annotation.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_bytecode.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_bytecode.py new file mode 120000 index 0000000..82cda03 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_bytecode.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a3/14/79/2c7c98ecca7c02735de6007d512d44d872cfd7f8a428367d3b5c9b07d8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_cfg.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_cfg.py new file mode 120000 index 0000000..1bca0fa --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_cfg.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ac/39/52/b01f524c2820be5efda6fd6aeae294c449736adf7dcfcfca70e6dc6dbf \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_code.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_code.py new file mode 120000 index 0000000..060256b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_code.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/9f/7c/1430a91c6d86c54a683276b0e52e9205a93278ca9ae5e5f6f67ace9aea \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_concrete.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_concrete.py new file mode 120000 index 0000000..d9097ea --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_concrete.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1c/a1/40/83dea264a1230cca5a3f06b62ebc0445cb5d59409309c9efa3eb0a9c47 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_flags.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_flags.py new file mode 120000 index 0000000..971ba85 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_flags.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bc/17/96/1811d632f278c8ded174928899266eae3bb2786433ee91e97810402832 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_instr.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_instr.py new file mode 120000 index 0000000..4a991d7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_instr.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/85/0e/84/caa49d76325e52fe2f85916b122cb5c71322aa9c83b822b4c5f29b81ff \ No newline at end of file diff 
--git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_misc.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_misc.py new file mode 120000 index 0000000..e6720f7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_misc.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/42/b9/3b5e929007fcd4896374394aba64bec7be874ab7f9b67346b6c4cbb885 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_peephole_opt.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_peephole_opt.py new file mode 120000 index 0000000..16f1758 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_peephole_opt.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/af/af/b1/20f9111e14259d359792dd2c534a74af503cd048032a81494f11362760 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/util_annotation.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/util_annotation.py new file mode 120000 index 0000000..13b4c02 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/util_annotation.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c0/aa/ba/c8f5abce4365868965e58d746f7563b82928898560bdc22393ff23cdff \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/pydevd_fix_code.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/pydevd_fix_code.py new file mode 120000 index 0000000..61fbd89 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/pydevd_fix_code.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d0/83/0d/a958946fd34ce603f0f4dbfd2946b2f7d5e42e85b84e7925ba9624743b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_app_engine_debug_startup.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_app_engine_debug_startup.py new file mode 120000 index 0000000..c518db5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_app_engine_debug_startup.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fa/00/35/509f29458dd513c6d173b2615919b19516a743c406dfbdb83b98a878a0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_coverage.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_coverage.py new file mode 100644 index 0000000..665e87b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_coverage.py @@ -0,0 +1,94 @@ +''' +Entry point module to run code-coverage. +''' + + +def is_valid_py_file(path): + ''' + Checks whether the file can be read by the coverage module. This is especially + needed for .pyx files and .py files with syntax errors. 
+ ''' + import os + + is_valid = False + if os.path.isfile(path) and not os.path.splitext(path)[1] == '.pyx': + try: + with open(path, 'rb') as f: + compile(f.read(), path, 'exec') + is_valid = True + except: + pass + return is_valid + + +def execute(): + import os + import sys + + files = None + if 'combine' not in sys.argv: + + if '--pydev-analyze' in sys.argv: + + # Ok, what we want here is having the files passed through stdin (because + # there may be too many files for passing in the command line -- we could + # just pass a dir and make the find files here, but as that's already + # given in the java side, let's just gather that info here). + sys.argv.remove('--pydev-analyze') + s = input() + s = s.replace('\r', '') + s = s.replace('\n', '') + + files = [] + invalid_files = [] + for v in s.split('|'): + if is_valid_py_file(v): + files.append(v) + else: + invalid_files.append(v) + if invalid_files: + sys.stderr.write('Invalid files not passed to coverage: %s\n' + % ', '.join(invalid_files)) + + # Note that in this case we'll already be in the working dir with the coverage files, + # so, the coverage file location is not passed. + + else: + # For all commands, the coverage file is configured in pydev, and passed as the first + # argument in the command line, so, let's make sure this gets to the coverage module. + os.environ['COVERAGE_FILE'] = sys.argv[1] + del sys.argv[1] + + try: + import coverage # @UnresolvedImport + except: + sys.stderr.write('Error: coverage module could not be imported\n') + sys.stderr.write('Please make sure that the coverage module ' + '(http://nedbatchelder.com/code/coverage/)\n') + sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,)) + + import traceback;traceback.print_exc() + return + + if hasattr(coverage, '__version__'): + version = tuple(map(int, coverage.__version__.split('.')[:2])) + if version < (4, 3): + sys.stderr.write('Error: minimum supported coverage version is 4.3.' + '\nFound: %s\nLocation: %s\n' + % ('.'.join(str(x) for x in version), coverage.__file__)) + sys.exit(1) + else: + sys.stderr.write('Warning: Could not determine version of python module coverage.' 
+ '\nEnsure coverage version is >= 4.3\n') + + from coverage.cmdline import main # @UnresolvedImport + + if files is not None: + sys.argv.append('xml') + sys.argv += files + + main() + + +if __name__ == '__main__': + execute() diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/README b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/README new file mode 120000 index 0000000..224bad3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/README @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ae/f2/16/0d4a0db0fc484d283a114bb72fd0e28666540b0c7af2f42caa8fc54908 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d4c5abd Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhook.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhook.cpython-38.pyc new file mode 100644 index 0000000..045bebc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhook.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookglut.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookglut.cpython-38.pyc new file mode 100644 index 0000000..a70de12 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookglut.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk.cpython-38.pyc new file mode 100644 index 0000000..33b238c Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk3.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk3.cpython-38.pyc new file mode 100644 index 0000000..be76550 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookgtk3.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookpyglet.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookpyglet.cpython-38.pyc new file mode 
100644 index 0000000..9d1220d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookpyglet.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt4.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt4.cpython-38.pyc new file mode 100644 index 0000000..b5f03bb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt4.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt5.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt5.cpython-38.pyc new file mode 100644 index 0000000..26f917f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookqt5.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhooktk.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhooktk.cpython-38.pyc new file mode 100644 index 0000000..c06331b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhooktk.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookwx.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookwx.cpython-38.pyc new file mode 100644 index 0000000..0f0336d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/inputhookwx.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/matplotlibtools.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/matplotlibtools.cpython-38.pyc new file mode 100644 index 0000000..c988632 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/matplotlibtools.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt.cpython-38.pyc new file mode 100644 index 0000000..016ea12 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_for_kernel.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_for_kernel.cpython-38.pyc new file mode 100644 index 0000000..7dcfebd Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_for_kernel.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_loaders.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_loaders.cpython-38.pyc new file mode 100644 index 0000000..5fd3c18 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/qt_loaders.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..f6baa6b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/__pycache__/version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhook.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhook.py new file mode 120000 index 0000000..228e261 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhook.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/38/5f/302d30be1486b0329339310eb741a5e8595e692f35c949f80311889596 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookglut.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookglut.py new file mode 100644 index 0000000..d65add9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookglut.py @@ -0,0 +1,154 @@ +# coding: utf-8 +""" +GLUT Inputhook support functions +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +# GLUT is quite an old library and it is difficult to ensure proper +# integration within IPython since original GLUT does not allow to handle +# events one by one. Instead, it requires for the mainloop to be entered +# and never returned (there is not even a function to exit he +# mainloop). Fortunately, there are alternatives such as freeglut +# (available for linux and windows) and the OSX implementation gives +# access to a glutCheckLoop() function that blocks itself until a new +# event is received. This means we have to setup the idle callback to +# ensure we got at least one event that will unblock the function. +# +# Furthermore, it is not possible to install these handlers without a window +# being first created. We choose to make this window invisible. This means that +# display mode options are set at this level and user won't be able to change +# them later without modifying the code. This should probably be made available +# via IPython options system. 
+ +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import os +import sys +from _pydev_bundle._pydev_saved_modules import time +import signal +import OpenGL.GLUT as glut # @UnresolvedImport +import OpenGL.platform as platform # @UnresolvedImport +from timeit import default_timer as clock +from pydev_ipython.inputhook import stdin_ready + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Frame per second : 60 +# Should probably be an IPython option +glut_fps = 60 + +# Display mode : double buffeed + rgba + depth +# Should probably be an IPython option +glut_display_mode = (glut.GLUT_DOUBLE | + glut.GLUT_RGBA | + glut.GLUT_DEPTH) + +glutMainLoopEvent = None +if sys.platform == 'darwin': + try: + glutCheckLoop = platform.createBaseFunction( + 'glutCheckLoop', dll=platform.GLUT, resultType=None, + argTypes=[], + doc='glutCheckLoop( ) -> None', + argNames=(), + ) + except AttributeError: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions''' + '''Consider installing freeglut.''') + glutMainLoopEvent = glutCheckLoop +elif glut.HAVE_FREEGLUT: + glutMainLoopEvent = glut.glutMainLoopEvent +else: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions. ''' + '''Consider installing freeglut.''') + +#----------------------------------------------------------------------------- +# Callback functions +#----------------------------------------------------------------------------- + + +def glut_display(): + # Dummy display function + pass + + +def glut_idle(): + # Dummy idle function + pass + + +def glut_close(): + # Close function only hides the current window + glut.glutHideWindow() + glutMainLoopEvent() + + +def glut_int_handler(signum, frame): + # Catch sigint and print the defautl message + signal.signal(signal.SIGINT, signal.default_int_handler) + print('\nKeyboardInterrupt') + # Need to reprint the prompt at this stage + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- +def inputhook_glut(): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + + signal.signal(signal.SIGINT, glut_int_handler) + + try: + t = clock() + + # Make sure the default window is set after a window has been closed + if glut.glutGetWindow() == 0: + glut.glutSetWindow(1) + glutMainLoopEvent() + return 0 + + while not stdin_ready(): + glutMainLoopEvent() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. 
+ # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass + return 0 diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk.py new file mode 120000 index 0000000..532282e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2c/32/e0/d2d0d76e6bfe62838345e40ab4ebb901adc4081afd1d468d255d54624d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk3.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk3.py new file mode 120000 index 0000000..b30fd20 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk3.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4a/1d/fc/db1af6e73b4a0107c2841366e03f8d0b952863e1e8d0b4c88f78b85a2f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookpyglet.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookpyglet.py new file mode 100644 index 0000000..5529307 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookpyglet.py @@ -0,0 +1,92 @@ +# encoding: utf-8 +""" +Enable pyglet to be used interacive by setting PyOS_InputHook. + +Authors +------- + +* Nicolas P. Rougier +* Fernando Perez +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys +from _pydev_bundle._pydev_saved_modules import time +from timeit import default_timer as clock +import pyglet # @UnresolvedImport +from pydev_ipython.inputhook import stdin_ready + + +# On linux only, window.flip() has a bug that causes an AttributeError on +# window close. For details, see: +# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e + +if sys.platform.startswith('linux'): + def flip(window): + try: + window.flip() + except AttributeError: + pass +else: + def flip(window): + window.flip() + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def inputhook_pyglet(): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. 
This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + try: + t = clock() + while not stdin_ready(): + pyglet.clock.tick() + for window in pyglet.app.windows: + window.switch_to() + window.dispatch_events() + window.dispatch_event('on_draw') + flip(window) + + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass + return 0 diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookqt4.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookqt4.py new file mode 120000 index 0000000..cf390ef --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookqt4.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0c/a3/a8/6c4bc7e9b22492f385e7cd558fead05d649453a7fb70473d7f35ebdff8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookqt5.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookqt5.py new file mode 120000 index 0000000..9b10b79 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookqt5.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f3/6a/7a/98b32b69d580cb31e1f5f6876ff7902518a0220696ef85916cd2daa057 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhooktk.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhooktk.py new file mode 120000 index 0000000..21499d7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhooktk.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6d/6e/e1/2d5bf624eb8fd344d27aa230f4edca28d547b410691396c70125be6d2a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookwx.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookwx.py new file mode 100644 index 0000000..c2e4b91 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/inputhookwx.py @@ -0,0 +1,169 @@ +# encoding: utf-8 +""" +Enable wxPython to be used interacive by setting PyOS_InputHook. + +Authors: Robin Dunn, Brian Granger, Ondrej Certik +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +import signal +from _pydev_bundle._pydev_saved_modules import time +from timeit import default_timer as clock +import wx + +from pydev_ipython.inputhook import stdin_ready + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def inputhook_wx1(): + """Run the wx event loop by processing pending events only. + + This approach seems to work, but its performance is not great as it + relies on having PyOS_InputHook called regularly. + """ + try: + app = wx.GetApp() # @UndefinedVariable + if app is not None: + assert wx.Thread_IsMain() # @UndefinedVariable + + # Make a temporary event loop and process system events until + # there are no more waiting, then allow idle events (which + # will also deal with pending or posted wx events.) + evtloop = wx.EventLoop() # @UndefinedVariable + ea = wx.EventLoopActivator(evtloop) # @UndefinedVariable + while evtloop.Pending(): + evtloop.Dispatch() + app.ProcessIdle() + del ea + except KeyboardInterrupt: + pass + return 0 + +class EventLoopTimer(wx.Timer): # @UndefinedVariable + + def __init__(self, func): + self.func = func + wx.Timer.__init__(self) # @UndefinedVariable + + def Notify(self): + self.func() + +class EventLoopRunner(object): + + def Run(self, time): + self.evtloop = wx.EventLoop() # @UndefinedVariable + self.timer = EventLoopTimer(self.check_stdin) + self.timer.Start(time) + self.evtloop.Run() + + def check_stdin(self): + if stdin_ready(): + self.timer.Stop() + self.evtloop.Exit() + +def inputhook_wx2(): + """Run the wx event loop, polling for stdin. + + This version runs the wx eventloop for an undetermined amount of time, + during which it periodically checks to see if anything is ready on + stdin. If anything is ready on stdin, the event loop exits. + + The argument to elr.Run controls how often the event loop looks at stdin. + This determines the responsiveness at the keyboard. A setting of 1000 + enables a user to type at most 1 char per second. I have found that a + setting of 10 gives good keyboard response. We can shorten it further, + but eventually performance would suffer from calling select/kbhit too + often. + """ + try: + app = wx.GetApp() # @UndefinedVariable + if app is not None: + assert wx.Thread_IsMain() # @UndefinedVariable + elr = EventLoopRunner() + # As this time is made shorter, keyboard response improves, but idle + # CPU load goes up. 10 ms seems like a good compromise. + elr.Run(time=10) # CHANGE time here to control polling interval + except KeyboardInterrupt: + pass + return 0 + +def inputhook_wx3(): + """Run the wx event loop by processing pending events only. + + This is like inputhook_wx1, but it keeps processing pending events + until stdin is ready. After processing all pending events, a call to + time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. + This sleep time should be tuned though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. 
+ try: + app = wx.GetApp() # @UndefinedVariable + if app is not None: + if hasattr(wx, 'IsMainThread'): + assert wx.IsMainThread() # @UndefinedVariable + else: + assert wx.Thread_IsMain() # @UndefinedVariable + + # The import of wx on Linux sets the handler for signal.SIGINT + # to 0. This is a bug in wx or gtk. We fix by just setting it + # back to the Python default. + if not callable(signal.getsignal(signal.SIGINT)): + signal.signal(signal.SIGINT, signal.default_int_handler) + + evtloop = wx.EventLoop() # @UndefinedVariable + ea = wx.EventLoopActivator(evtloop) # @UndefinedVariable + t = clock() + while not stdin_ready(): + while evtloop.Pending(): + t = clock() + evtloop.Dispatch() + app.ProcessIdle() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + del ea + except KeyboardInterrupt: + pass + return 0 + +if sys.platform == 'darwin': + # On OSX, evtloop.Pending() always returns True, regardless of there being + # any events pending. As such we can't use implementations 1 or 3 of the + # inputhook as those depend on a pending/dispatch loop. + inputhook_wx = inputhook_wx2 +else: + # This is our default implementation + inputhook_wx = inputhook_wx3 diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/matplotlibtools.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/matplotlibtools.py new file mode 120000 index 0000000..508c68e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/matplotlibtools.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6d/94/88/618d3d0a7cbdebe369c4e75b994d25b9e3446827c88493fcec3f9d6c57 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt.py new file mode 120000 index 0000000..50e4b0b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2d/ec/be/ec7fc59f8d336ba9492a67a40beaf4b8e4de3a09c7e85206501686a8ff \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt_for_kernel.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt_for_kernel.py new file mode 120000 index 0000000..300120b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt_for_kernel.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1c/5e/bc/5a55d1d3c5bafb80770e3b85fb901b34430babe357215b1ca125131057 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt_loaders.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt_loaders.py new file mode 120000 index 0000000..deabd19 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/qt_loaders.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/67/1c/96/3c67c2b5d61b1abb49e3d064016bbb0845bac37f158d29e8784cef5a33 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/version.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/version.py new file mode 120000 index 0000000..7aac396 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/version.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/94/b0/52/47c9ad945d5e0b3c3039e8e58dc840c9f4b2d28a43f1bd30fd08d1f7b4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_pysrc.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_pysrc.py new file mode 120000 index 0000000..a1d09dd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_pysrc.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2c/ab/70/4320d862bcb79612fb61a96099e860583f36f8d0e9a906188b56d3623f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_run_in_console.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_run_in_console.py new file mode 100644 index 0000000..a87a0e4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_run_in_console.py @@ -0,0 +1,153 @@ +''' +Entry point module to run a file in the interactive console. +''' +import os +import sys +import traceback +from pydevconsole import InterpreterInterface, process_exec_queue, start_console_server, init_mpl_in_console +from _pydev_bundle._pydev_saved_modules import threading, _queue + +from _pydev_bundle import pydev_imports +from _pydevd_bundle.pydevd_utils import save_main_module +from _pydev_bundle.pydev_console_utils import StdIn +from pydevd_file_utils import get_fullname + + +def run_file(file, globals=None, locals=None, is_module=False): + module_name = None + entry_point_fn = None + if is_module: + file, _, entry_point_fn = file.partition(':') + module_name = file + filename = get_fullname(file) + if filename is None: + sys.stderr.write("No module named %s\n" % file) + return + else: + file = filename + + if os.path.isdir(file): + new_target = os.path.join(file, '__main__.py') + if os.path.isfile(new_target): + file = new_target + + if globals is None: + m = save_main_module(file, 'pydev_run_in_console') + + globals = m.__dict__ + try: + globals['__builtins__'] = __builtins__ + except NameError: + pass # Not there on Jython... + + if locals is None: + locals = globals + + if not is_module: + sys.path.insert(0, os.path.split(file)[0]) + + print('Running %s' % file) + try: + if not is_module: + pydev_imports.execfile(file, globals, locals) # execute the script + else: + # treat ':' as a seperator between module and entry point function + # if there is no entry point we run we same as with -m switch. Otherwise we perform + # an import and execute the entry point + if entry_point_fn: + mod = __import__(module_name, level=0, fromlist=[entry_point_fn], globals=globals, locals=locals) + func = getattr(mod, entry_point_fn) + func() + else: + # Run with the -m switch + from _pydevd_bundle import pydevd_runpy + pydevd_runpy._run_module_as_main(module_name) + except: + traceback.print_exc() + + return globals + + +def skip_successful_exit(*args): + """ System exit in file shouldn't kill interpreter (i.e. 
in `timeit`)""" + if len(args) == 1 and args[0] in (0, None): + pass + else: + raise SystemExit(*args) + + +def process_args(argv): + setup_args = {'file': '', 'module': False} + + setup_args['port'] = argv[1] + del argv[1] + setup_args['client_port'] = argv[1] + del argv[1] + + module_flag = "--module" + if module_flag in argv: + i = argv.index(module_flag) + if i != -1: + setup_args['module'] = True + setup_args['file'] = argv[i + 1] + del sys.argv[i] + else: + setup_args['file'] = argv[1] + + del argv[0] + + return setup_args + + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + setup = process_args(sys.argv) + + port = setup['port'] + client_port = setup['client_port'] + file = setup['file'] + is_module = setup['module'] + + from _pydev_bundle import pydev_localhost + + if int(port) == 0 and int(client_port) == 0: + (h, p) = pydev_localhost.get_socket_name() + client_port = p + + host = pydev_localhost.get_localhost() + + # replace exit (see comments on method) + # note that this does not work in jython!!! (sys method can't be replaced). + sys.exit = skip_successful_exit + + connect_status_queue = _queue.Queue() + interpreter = InterpreterInterface(host, int(client_port), threading.current_thread(), connect_status_queue=connect_status_queue) + + server_thread = threading.Thread(target=start_console_server, + name='ServerThread', + args=(host, int(port), interpreter)) + server_thread.daemon = True + server_thread.start() + + sys.stdin = StdIn(interpreter, host, client_port, sys.stdin) + + init_mpl_in_console(interpreter) + + try: + success = connect_status_queue.get(True, 60) + if not success: + raise ValueError() + except: + sys.stderr.write("Console server didn't start\n") + sys.stderr.flush() + sys.exit(1) + + globals = run_file(file, None, None, is_module) + + interpreter.get_namespace().update(globals) + + interpreter.ShowConsole() + + process_exec_queue(interpreter) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt new file mode 120000 index 0000000..47dde04 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/86/79/13/02ec52156fd38a5830d01b754552eb7c6444de16230264cf9b593eb1cf \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/__pycache__/sitecustomize.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/__pycache__/sitecustomize.cpython-38.pyc new file mode 100644 index 0000000..67319eb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/__pycache__/sitecustomize.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/sitecustomize.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/sitecustomize.py new file mode 120000 index 0000000..af63398 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_sitecustomize/sitecustomize.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/90/71/55/0882d09e0b5b353822693bb489c58387b851cea2293250127942c8f16a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevconsole.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevconsole.py new file mode 100644 index 0000000..6b13788 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevconsole.py @@ -0,0 +1,603 @@ +''' +Entry point module to start the interactive console. +''' +from _pydev_bundle._pydev_saved_modules import thread, _code +from _pydevd_bundle.pydevd_constants import IS_JYTHON +start_new_thread = thread.start_new_thread + +from _pydevd_bundle.pydevconsole_code import InteractiveConsole + +compile_command = _code.compile_command +InteractiveInterpreter = _code.InteractiveInterpreter + +import os +import sys + +from _pydev_bundle._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import INTERACTIVE_MODE_AVAILABLE + +import traceback +from _pydev_bundle import pydev_log + +from _pydevd_bundle import pydevd_save_locals + +from _pydev_bundle.pydev_imports import Exec, _queue + +import builtins as __builtin__ + +from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn # @UnusedImport +from _pydev_bundle.pydev_console_utils import CodeFragment + + +class Command: + + def __init__(self, interpreter, code_fragment): + """ + :type code_fragment: CodeFragment + :type interpreter: InteractiveConsole + """ + self.interpreter = interpreter + self.code_fragment = code_fragment + self.more = None + + def symbol_for_fragment(code_fragment): + if code_fragment.is_single_line: + symbol = 'single' + else: + if IS_JYTHON: + symbol = 'single' # Jython doesn't support exec + else: + symbol = 'exec' + return symbol + + symbol_for_fragment = staticmethod(symbol_for_fragment) + + def run(self): + text = self.code_fragment.text + symbol = self.symbol_for_fragment(self.code_fragment) + + self.more = self.interpreter.runsource(text, '<input>', symbol) + + +try: + from _pydev_bundle.pydev_imports import execfile + + __builtin__.execfile = execfile +except: + pass + +# Pull in runfile, the interface to UMD that wraps execfile +from _pydev_bundle.pydev_umd import runfile, _set_globals_function +if sys.version_info[0] >= 3: + __builtin__.runfile = runfile +else: + __builtin__.runfile = runfile + + +#======================================================================================================================= +# InterpreterInterface +#======================================================================================================================= +class InterpreterInterface(BaseInterpreterInterface): + ''' + The methods in this class should be registered in the xml-rpc server.
+ ''' + + def __init__(self, host, client_port, mainThread, connect_status_queue=None): + BaseInterpreterInterface.__init__(self, mainThread, connect_status_queue) + self.client_port = client_port + self.host = host + self.namespace = {} + self.interpreter = InteractiveConsole(self.namespace) + self._input_error_printed = False + + def do_add_exec(self, codeFragment): + command = Command(self.interpreter, codeFragment) + command.run() + return command.more + + def get_namespace(self): + return self.namespace + + def getCompletions(self, text, act_tok): + try: + from _pydev_bundle._pydev_completer import Completer + + completer = Completer(self.namespace, None) + return completer.complete(act_tok) + except: + pydev_log.exception() + return [] + + def close(self): + sys.exit(0) + + def get_greeting_msg(self): + return 'PyDev console: starting.\n' + + +class _ProcessExecQueueHelper: + _debug_hook = None + _return_control_osc = False + + +def set_debug_hook(debug_hook): + _ProcessExecQueueHelper._debug_hook = debug_hook + + +def activate_mpl_if_already_imported(interpreter): + if interpreter.mpl_modules_for_patching: + for module in list(interpreter.mpl_modules_for_patching): + if module in sys.modules: + activate_function = interpreter.mpl_modules_for_patching.pop(module) + activate_function() + + +def init_set_return_control_back(interpreter): + from pydev_ipython.inputhook import set_return_control_callback + + def return_control(): + ''' A function that the inputhooks can call (via inputhook.stdin_ready()) to find + out if they should cede control and return ''' + if _ProcessExecQueueHelper._debug_hook: + # Some of the input hooks check return control without doing + # a single operation, so we don't return True on every + # call when the debug hook is in place to allow the GUI to run + # XXX: Eventually the inputhook code will have diverged enough + # from the IPython source that it will be worthwhile rewriting + # it rather than pretending to maintain the old API + _ProcessExecQueueHelper._return_control_osc = not _ProcessExecQueueHelper._return_control_osc + if _ProcessExecQueueHelper._return_control_osc: + return True + + if not interpreter.exec_queue.empty(): + return True + return False + + set_return_control_callback(return_control) + + +def init_mpl_in_console(interpreter): + init_set_return_control_back(interpreter) + + if not INTERACTIVE_MODE_AVAILABLE: + return + + activate_mpl_if_already_imported(interpreter) + from _pydev_bundle.pydev_import_hook import import_hook_manager + for mod in list(interpreter.mpl_modules_for_patching): + import_hook_manager.add_module_name(mod, interpreter.mpl_modules_for_patching.pop(mod)) + + +if sys.platform != 'win32': + + if not hasattr(os, 'kill'): # Jython may not have it. + + def pid_exists(pid): + return True + + else: + + def pid_exists(pid): + # Note that this function in the face of errors will conservatively consider that + # the pid is still running (because we'll exit the current process when it's + # no longer running, so, we need to be 100% sure it actually exited). + + import errno + if pid == 0: + # According to "man 2 kill" PID 0 has a special meaning: + # it refers to <<every process in the process group of the + # calling process>> so we don't want to go any further. + # If we get here it means this UNIX platform *does* have + # a process with id 0.
+ return True + try: + os.kill(pid, 0) + except OSError as err: + if err.errno == errno.ESRCH: + # ESRCH == No such process + return False + elif err.errno == errno.EPERM: + # EPERM clearly means there's a process to deny access to + return True + else: + # According to "man 2 kill" possible error values are + # (EINVAL, EPERM, ESRCH) therefore we should never get + # here. If we do, although it's an error, consider it + # exists (see first comment in this function). + return True + else: + return True + +else: + + def pid_exists(pid): + # Note that this function in the face of errors will conservatively consider that + # the pid is still running (because we'll exit the current process when it's + # no longer running, so, we need to be 100% sure it actually exited). + import ctypes + kernel32 = ctypes.windll.kernel32 + + PROCESS_QUERY_INFORMATION = 0x0400 + PROCESS_QUERY_LIMITED_INFORMATION = 0x1000 + ERROR_INVALID_PARAMETER = 0x57 + STILL_ACTIVE = 259 + + process = kernel32.OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_QUERY_LIMITED_INFORMATION, 0, pid) + if not process: + err = kernel32.GetLastError() + if err == ERROR_INVALID_PARAMETER: + # Means it doesn't exist (pid parameter is wrong). + return False + + # There was some unexpected error (such as access denied), so + # consider it exists (although it could be something else, but we don't want + # to raise any errors -- so, just consider it exists). + return True + + try: + zero = ctypes.c_int(0) + exit_code = ctypes.pointer(zero) + + exit_code_suceeded = kernel32.GetExitCodeProcess(process, exit_code) + if not exit_code_suceeded: + # There was some unexpected error (such as access denied), so + # consider it exists (although it could be something else, but we don't want + # to raise any errors -- so, just consider it exists). + return True + + elif bool(exit_code.contents.value) and int(exit_code.contents.value) != STILL_ACTIVE: + return False + finally: + kernel32.CloseHandle(process) + + return True + + +def process_exec_queue(interpreter): + init_mpl_in_console(interpreter) + from pydev_ipython.inputhook import get_inputhook + try: + kill_if_pid_not_alive = int(os.environ.get('PYDEV_ECLIPSE_PID', '-1')) + except: + kill_if_pid_not_alive = -1 + + while 1: + if kill_if_pid_not_alive != -1: + if not pid_exists(kill_if_pid_not_alive): + exit() + + # Running the request may have changed the inputhook in use + inputhook = get_inputhook() + + if _ProcessExecQueueHelper._debug_hook: + _ProcessExecQueueHelper._debug_hook() + + if inputhook: + try: + # Note: it'll block here until return_control returns True. + inputhook() + except: + pydev_log.exception() + try: + try: + code_fragment = interpreter.exec_queue.get(block=True, timeout=1 / 20.) # 20 calls/second + except _queue.Empty: + continue + + if callable(code_fragment): + # It can be a callable (i.e.: something that must run in the main + # thread can be put in the queue for later execution). + code_fragment() + else: + more = interpreter.add_exec(code_fragment) + except KeyboardInterrupt: + interpreter.buffer = None + continue + except SystemExit: + raise + except: + pydev_log.exception('Error processing queue on pydevconsole.') + exit() + + +if 'IPYTHONENABLE' in os.environ: + IPYTHON = os.environ['IPYTHONENABLE'] == 'True' +else: + # By default, don't use IPython because occasionally changes + # in IPython break pydevd. 
+ IPYTHON = False + +try: + try: + exitfunc = sys.exitfunc + except AttributeError: + exitfunc = None + + if IPYTHON: + from _pydev_bundle.pydev_ipython_console import InterpreterInterface + if exitfunc is not None: + sys.exitfunc = exitfunc + else: + try: + delattr(sys, 'exitfunc') + except: + pass +except: + IPYTHON = False + pass + + +#======================================================================================================================= +# _DoExit +#======================================================================================================================= +def do_exit(*args): + ''' + We have to override the exit because calling sys.exit will only actually exit the main thread, + and as we're in a Xml-rpc server, that won't work. + ''' + + try: + import java.lang.System + + java.lang.System.exit(1) + except ImportError: + if len(args) == 1: + os._exit(args[0]) + else: + os._exit(0) + + +#======================================================================================================================= +# start_console_server +#======================================================================================================================= +def start_console_server(host, port, interpreter): + try: + if port == 0: + host = '' + + # I.e.: supporting the internal Jython version in PyDev to create a Jython interactive console inside Eclipse. + from _pydev_bundle.pydev_imports import SimpleXMLRPCServer as XMLRPCServer # @Reimport + + try: + server = XMLRPCServer((host, port), logRequests=False, allow_none=True) + + except: + sys.stderr.write('Error starting server with host: "%s", port: "%s", client_port: "%s"\n' % (host, port, interpreter.client_port)) + sys.stderr.flush() + raise + + # Tell UMD the proper default namespace + _set_globals_function(interpreter.get_namespace) + + server.register_function(interpreter.execLine) + server.register_function(interpreter.execMultipleLines) + server.register_function(interpreter.getCompletions) + server.register_function(interpreter.getFrame) + server.register_function(interpreter.getVariable) + server.register_function(interpreter.changeVariable) + server.register_function(interpreter.getDescription) + server.register_function(interpreter.close) + server.register_function(interpreter.interrupt) + server.register_function(interpreter.handshake) + server.register_function(interpreter.connectToDebugger) + server.register_function(interpreter.hello) + server.register_function(interpreter.getArray) + server.register_function(interpreter.evaluate) + server.register_function(interpreter.ShowConsole) + server.register_function(interpreter.loadFullValue) + + # Functions for GUI main loop integration + server.register_function(interpreter.enableGui) + + if port == 0: + (h, port) = server.socket.getsockname() + + print(port) + print(interpreter.client_port) + + while True: + try: + server.serve_forever() + except: + # Ugly code to be py2/3 compatible + # https://sw-brainwy.rhcloud.com/tracker/PyDev/534: + # Unhandled "interrupted system call" error in the pydevconsol.py + e = sys.exc_info()[1] + retry = False + try: + retry = e.args[0] == 4 # errno.EINTR + except: + pass + if not retry: + raise + # Otherwise, keep on going + return server + except: + pydev_log.exception() + # Notify about error to avoid long waiting + connection_queue = interpreter.get_connect_status_queue() + if connection_queue is not None: + connection_queue.put(False) + + +def start_server(host, port, client_port): + # replace exit (see comments on 
method) + # note that this does not work in jython!!! (sys method can't be replaced). + sys.exit = do_exit + + interpreter = InterpreterInterface(host, client_port, threading.current_thread()) + + start_new_thread(start_console_server, (host, port, interpreter)) + + process_exec_queue(interpreter) + + +def get_ipython_hidden_vars(): + if IPYTHON and hasattr(__builtin__, 'interpreter'): + interpreter = get_interpreter() + return interpreter.get_ipython_hidden_vars_dict() + + +def get_interpreter(): + try: + interpreterInterface = getattr(__builtin__, 'interpreter') + except AttributeError: + interpreterInterface = InterpreterInterface(None, None, threading.current_thread()) + __builtin__.interpreter = interpreterInterface + sys.stderr.write(interpreterInterface.get_greeting_msg()) + sys.stderr.flush() + + return interpreterInterface + + +def get_completions(text, token, globals, locals): + interpreterInterface = get_interpreter() + + interpreterInterface.interpreter.update(globals, locals) + + return interpreterInterface.getCompletions(text, token) + +#=============================================================================== +# Debugger integration +#=============================================================================== + + +def exec_code(code, globals, locals, debugger): + interpreterInterface = get_interpreter() + interpreterInterface.interpreter.update(globals, locals) + + res = interpreterInterface.need_more(code) + + if res: + return True + + interpreterInterface.add_exec(code, debugger) + + return False + + +class ConsoleWriter(InteractiveInterpreter): + skip = 0 + + def __init__(self, locals=None): + InteractiveInterpreter.__init__(self, locals) + + def write(self, data): + # if (data.find("global_vars") == -1 and data.find("pydevd") == -1): + if self.skip > 0: + self.skip -= 1 + else: + if data == "Traceback (most recent call last):\n": + self.skip = 1 + sys.stderr.write(data) + + def showsyntaxerror(self, filename=None): + """Display the syntax error that just occurred.""" + # Override for avoid using sys.excepthook PY-12600 + type, value, tb = sys.exc_info() + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + if filename and type is SyntaxError: + # Work hard to stuff the correct filename in the exception + try: + msg, (dummy_filename, lineno, offset, line) = value.args + except ValueError: + # Not the format we expect; leave it alone + pass + else: + # Stuff in the right filename + value = SyntaxError(msg, (filename, lineno, offset, line)) + sys.last_value = value + list = traceback.format_exception_only(type, value) + sys.stderr.write(''.join(list)) + + def showtraceback(self, *args, **kwargs): + """Display the exception that just occurred.""" + # Override for avoid using sys.excepthook PY-12600 + try: + type, value, tb = sys.exc_info() + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + tblist = traceback.extract_tb(tb) + del tblist[:1] + lines = traceback.format_list(tblist) + if lines: + lines.insert(0, "Traceback (most recent call last):\n") + lines.extend(traceback.format_exception_only(type, value)) + finally: + tblist = tb = None + sys.stderr.write(''.join(lines)) + + +def console_exec(thread_id, frame_id, expression, dbg): + """returns 'False' in case expression is partially correct + """ + frame = dbg.find_frame(thread_id, frame_id) + + is_multiline = expression.count('@LINE@') > 1 + expression = str(expression.replace('@LINE@', '\n')) + + # Not using frame.f_globals because of 
https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329 + # (Names not resolved in generator expression in method) + # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html + updated_globals = {} + updated_globals.update(frame.f_globals) + updated_globals.update(frame.f_locals) # locals later because it has precedence over the actual globals + + if IPYTHON: + need_more = exec_code(CodeFragment(expression), updated_globals, frame.f_locals, dbg) + if not need_more: + pydevd_save_locals.save_locals(frame) + return need_more + + interpreter = ConsoleWriter() + + if not is_multiline: + try: + code = compile_command(expression) + except (OverflowError, SyntaxError, ValueError): + # Case 1 + interpreter.showsyntaxerror() + return False + if code is None: + # Case 2 + return True + else: + code = expression + + # Case 3 + + try: + Exec(code, updated_globals, frame.f_locals) + + except SystemExit: + raise + except: + interpreter.showtraceback() + else: + pydevd_save_locals.save_locals(frame) + return False + + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + # Important: don't use this module directly as the __main__ module, rather, import itself as pydevconsole + # so that we don't get multiple pydevconsole modules if it's executed directly (otherwise we'd have multiple + # representations of its classes). + # See: https://sw-brainwy.rhcloud.com/tracker/PyDev/446: + # 'Variables' and 'Expressions' views stopped working when debugging interactive console + import pydevconsole + sys.stdin = pydevconsole.BaseStdIn(sys.stdin) + port, client_port = sys.argv[1:3] + from _pydev_bundle import pydev_localhost + + if int(port) == 0 and int(client_port) == 0: + (h, p) = pydev_localhost.get_socket_name() + + client_port = p + + pydevconsole.start_server(pydev_localhost.get_localhost(), int(port), int(client_port)) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd.py new file mode 100644 index 0000000..67382de --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd.py @@ -0,0 +1,3433 @@ +''' +Entry point module (keep at root): + +This module starts the debugger. +''' +import sys # @NoMove +if sys.version_info[:2] < (3, 6): + raise RuntimeError('The PyDev.Debugger requires Python 3.6 onwards to be run. If you need to use an older Python version, use an older version of the debugger.') +import os + +try: + # Just empty packages to check if they're in the PYTHONPATH. + import _pydev_bundle +except ImportError: + # On the first import of a pydevd module, add pydevd itself to the PYTHONPATH + # if its dependencies cannot be imported. + sys.path.append(os.path.dirname(os.path.abspath(__file__))) + import _pydev_bundle + +# Import this first as it'll check for shadowed modules and will make sure that we import +# things as needed for gevent. 
+from _pydevd_bundle import pydevd_constants + +import atexit +import dis +import io +from collections import defaultdict +from contextlib import contextmanager +from functools import partial +import itertools +import traceback +import weakref +import getpass as getpass_mod +import functools + +import pydevd_file_utils +from _pydev_bundle import pydev_imports, pydev_log +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_bundle.pydev_override import overrides +from _pydev_bundle._pydev_saved_modules import threading, time, thread +from _pydevd_bundle import pydevd_extension_utils, pydevd_frame_utils +from _pydevd_bundle.pydevd_filtering import FilesFiltering, glob_matches_path +from _pydevd_bundle import pydevd_io, pydevd_vm_type +from _pydevd_bundle import pydevd_utils +from _pydevd_bundle import pydevd_runpy +from _pydev_bundle.pydev_console_utils import DebugConsoleStdIn +from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info +from _pydevd_bundle.pydevd_breakpoints import ExceptionBreakpoint, get_exception_breakpoint +from _pydevd_bundle.pydevd_comm_constants import (CMD_THREAD_SUSPEND, CMD_STEP_INTO, CMD_SET_BREAK, + CMD_STEP_INTO_MY_CODE, CMD_STEP_OVER, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, + CMD_SET_NEXT_STATEMENT, CMD_STEP_RETURN, CMD_ADD_EXCEPTION_BREAK, CMD_STEP_RETURN_MY_CODE, + CMD_STEP_OVER_MY_CODE, constant_to_str, CMD_STEP_INTO_COROUTINE) +from _pydevd_bundle.pydevd_constants import (get_thread_id, get_current_thread_id, + DebugInfoHolder, PYTHON_SUSPEND, STATE_SUSPEND, STATE_RUN, get_frame, + clear_cached_thread_id, INTERACTIVE_MODE_AVAILABLE, SHOW_DEBUG_INFO_ENV, NULL, + NO_FTRACE, IS_IRONPYTHON, JSON_PROTOCOL, IS_CPYTHON, HTTP_JSON_PROTOCOL, USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, call_only_once, + ForkSafeLock, IGNORE_BASENAMES_STARTING_WITH, EXCEPTION_TYPE_UNHANDLED, SUPPORT_GEVENT, + PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING, PYDEVD_IPYTHON_CONTEXT) +from _pydevd_bundle.pydevd_defaults import PydevdCustomization # Note: import alias used on pydev_monkey. 
+from _pydevd_bundle.pydevd_custom_frames import CustomFramesContainer, custom_frames_container_init +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE, LIB_FILE, DONT_TRACE_DIRS +from _pydevd_bundle.pydevd_extension_api import DebuggerEventHandler +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, remove_exception_from_frame +from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory +from _pydevd_bundle.pydevd_trace_dispatch import ( + trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips, fix_top_level_trace_and_get_trace_func) +from _pydevd_bundle.pydevd_utils import save_main_module, is_current_thread_main_thread, \ + import_attr_from_module +from _pydevd_frame_eval.pydevd_frame_eval_main import ( + frame_eval_func, dummy_trace_dispatch) +import pydev_ipython # @UnusedImport +from _pydevd_bundle.pydevd_source_mapping import SourceMapping +from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_concurrency_logger import ThreadingLogger, AsyncioLogger, send_concurrency_message, cur_time +from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_thread_wrappers import wrap_threads +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER +from pydevd_file_utils import get_fullname, get_package_dir +from os.path import abspath as os_path_abspath +import pydevd_tracing +from _pydevd_bundle.pydevd_comm import (InternalThreadCommand, InternalThreadCommandForAnyThread, + create_server_socket, FSNotifyThread) +from _pydevd_bundle.pydevd_comm import(InternalConsoleExec, + _queue, ReaderThread, GetGlobalDebugger, get_global_debugger, + set_global_debugger, WriterThread, + start_client, start_server, InternalGetBreakpointException, InternalSendCurrExceptionTrace, + InternalSendCurrExceptionTraceProceeded) +from _pydevd_bundle.pydevd_daemon_thread import PyDBDaemonThread, mark_as_pydevd_daemon_thread +from _pydevd_bundle.pydevd_process_net_command_json import PyDevJsonCommandProcessor +from _pydevd_bundle.pydevd_process_net_command import process_net_command +from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND + +from _pydevd_bundle.pydevd_breakpoints import stop_on_unhandled_exception +from _pydevd_bundle.pydevd_collect_bytecode_info import collect_try_except_info, collect_return_info, collect_try_except_info_from_source +from _pydevd_bundle.pydevd_suspended_frames import SuspendedFramesManager +from socket import SHUT_RDWR +from _pydevd_bundle.pydevd_api import PyDevdAPI +from _pydevd_bundle.pydevd_timeout import TimeoutTracker +from _pydevd_bundle.pydevd_thread_lifecycle import suspend_all_threads, mark_thread_suspended + +pydevd_gevent_integration = None + +if SUPPORT_GEVENT: + try: + from _pydevd_bundle import pydevd_gevent_integration + except: + pydev_log.exception( + 'pydevd: GEVENT_SUPPORT is set but gevent is not available in the environment.\n' + 'Please unset GEVENT_SUPPORT from the environment variables or install gevent.') + else: + pydevd_gevent_integration.log_gevent_debug_info() + +if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: + from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame + +__version_info__ = (2, 8, 0) +__version_info_str__ = [] +for v in __version_info__: + __version_info_str__.append(str(v)) + +__version__ = '.'.join(__version_info_str__) + +# IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe + + +def 
install_breakpointhook(pydevd_breakpointhook=None): + if pydevd_breakpointhook is None: + + def pydevd_breakpointhook(*args, **kwargs): + hookname = os.getenv('PYTHONBREAKPOINT') + if ( + hookname is not None + and len(hookname) > 0 + and hasattr(sys, '__breakpointhook__') + and sys.__breakpointhook__ != pydevd_breakpointhook + ): + sys.__breakpointhook__(*args, **kwargs) + else: + settrace(*args, **kwargs) + + if sys.version_info[0:2] >= (3, 7): + # There are some choices on how to provide the breakpoint hook. Namely, we can provide a + # PYTHONBREAKPOINT which provides the import path for a method to be executed or we + # can override sys.breakpointhook. + # pydevd overrides sys.breakpointhook instead of providing an environment variable because + # it's possible that the debugger starts the user program but is not available in the + # PYTHONPATH (and would thus fail to be imported if PYTHONBREAKPOINT was set to pydevd.settrace). + # Note that the implementation still takes PYTHONBREAKPOINT in account (so, if it was provided + # by someone else, it'd still work). + sys.breakpointhook = pydevd_breakpointhook + else: + if sys.version_info[0] >= 3: + import builtins as __builtin__ # Py3 noqa + else: + import __builtin__ # noqa + + # In older versions, breakpoint() isn't really available, so, install the hook directly + # in the builtins. + __builtin__.breakpoint = pydevd_breakpointhook + sys.__breakpointhook__ = pydevd_breakpointhook + + +# Install the breakpoint hook at import time. +install_breakpointhook() + +from _pydevd_bundle.pydevd_plugin_utils import PluginManager + +threadingEnumerate = threading.enumerate +threadingCurrentThread = threading.current_thread + +try: + 'dummy'.encode('utf-8') # Added because otherwise Jython 2.2.1 wasn't finding the encoding (if it wasn't loaded in the main thread). +except: + pass + +_global_redirect_stdout_to_server = False +_global_redirect_stderr_to_server = False + +file_system_encoding = getfilesystemencoding() + +_CACHE_FILE_TYPE = {} + +pydev_log.debug('Using GEVENT_SUPPORT: %s', pydevd_constants.SUPPORT_GEVENT) +pydev_log.debug('Using GEVENT_SHOW_PAUSED_GREENLETS: %s', pydevd_constants.GEVENT_SHOW_PAUSED_GREENLETS) +pydev_log.debug('pydevd __file__: %s', os.path.abspath(__file__)) +pydev_log.debug('Using PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING: %s', pydevd_constants.PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING) +if pydevd_constants.PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING: + pydev_log.debug('PYDEVD_IPYTHON_CONTEXT: %s', pydevd_constants.PYDEVD_IPYTHON_CONTEXT) + + +#======================================================================================================================= +# PyDBCommandThread +#======================================================================================================================= +class PyDBCommandThread(PyDBDaemonThread): + + def __init__(self, py_db): + PyDBDaemonThread.__init__(self, py_db) + self._py_db_command_thread_event = py_db._py_db_command_thread_event + self.name = 'pydevd.CommandThread' + + @overrides(PyDBDaemonThread._on_run) + def _on_run(self): + # Delay a bit this initialization to wait for the main program to start. 
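# Illustration (standalone sketch, not pydevd code): the hook installation above relies on
# the Python 3.7+ contract that the builtin breakpoint() forwards its arguments to whatever
# callable is stored in sys.breakpointhook (with PYTHONBREAKPOINT honoured by the default
# hook). The demo hook below is a made-up placeholder that prints instead of calling settrace().
import sys

def _demo_breakpointhook(*args, **kwargs):
    # Stand-in for settrace(): just record that a breakpoint was requested.
    print('breakpoint() requested with', args, kwargs)

_previous_hook = sys.breakpointhook          # sys.__breakpointhook__ still holds the default
sys.breakpointhook = _demo_breakpointhook
breakpoint(note='demo')                      # dispatched to _demo_breakpointhook
sys.breakpointhook = _previous_hook          # restore the original behaviour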
+ self._py_db_command_thread_event.wait(0.3) + + if self._kill_received: + return + + try: + while not self._kill_received: + try: + self.py_db.process_internal_commands() + except: + pydev_log.info('Finishing debug communication...(2)') + self._py_db_command_thread_event.clear() + self._py_db_command_thread_event.wait(0.3) + except: + try: + pydev_log.debug(sys.exc_info()[0]) + except: + # In interpreter shutdown many things can go wrong (any module variables may + # be None, streams can be closed, etc). + pass + + # only got this error in interpreter shutdown + # pydev_log.info('Finishing debug communication...(3)') + + @overrides(PyDBDaemonThread.do_kill_pydev_thread) + def do_kill_pydev_thread(self): + PyDBDaemonThread.do_kill_pydev_thread(self) + # Set flag so that it can exit before the usual timeout. + self._py_db_command_thread_event.set() + + +#======================================================================================================================= +# CheckAliveThread +# Non-daemon thread: guarantees that all data is written even if program is finished +#======================================================================================================================= +class CheckAliveThread(PyDBDaemonThread): + + def __init__(self, py_db): + PyDBDaemonThread.__init__(self, py_db) + self.name = 'pydevd.CheckAliveThread' + self.daemon = False + self._wait_event = threading.Event() + + @overrides(PyDBDaemonThread._on_run) + def _on_run(self): + py_db = self.py_db + + def can_exit(): + with py_db._main_lock: + # Note: it's important to get the lock besides checking that it's empty (this + # means that we're not in the middle of some command processing). + writer = py_db.writer + writer_empty = writer is not None and writer.empty() + + return not py_db.has_user_threads_alive() and writer_empty + + try: + while not self._kill_received: + self._wait_event.wait(0.3) + if can_exit(): + break + + py_db.check_output_redirect() + + if can_exit(): + pydev_log.debug("No threads alive, finishing debug session") + py_db.dispose_and_kill_all_pydevd_threads() + except: + pydev_log.exception() + + def join(self, timeout=None): + # If someone tries to join this thread, mark it to be killed. + # This is the case for CherryPy when auto-reload is turned on. + self.do_kill_pydev_thread() + PyDBDaemonThread.join(self, timeout=timeout) + + @overrides(PyDBDaemonThread.do_kill_pydev_thread) + def do_kill_pydev_thread(self): + PyDBDaemonThread.do_kill_pydev_thread(self) + # Set flag so that it can exit before the usual timeout. + self._wait_event.set() + + +class AbstractSingleNotificationBehavior(object): + ''' + The basic usage should be: + + # Increment the request time for the suspend. + single_notification_behavior.increment_suspend_time() + + # Notify that this is a pause request (when a pause, not a breakpoint). + single_notification_behavior.on_pause() + + # Mark threads to be suspended. + set_suspend(...) + + # On do_wait_suspend, use notify_thread_suspended: + def do_wait_suspend(...): + with single_notification_behavior.notify_thread_suspended(thread_id): + ... 
+ ''' + + __slots__ = [ + '_last_resume_notification_time', + '_last_suspend_notification_time', + '_lock', + '_next_request_time', + '_suspend_time_request', + '_suspended_thread_ids', + '_pause_requested', + '_py_db', + ] + + NOTIFY_OF_PAUSE_TIMEOUT = .5 + + def __init__(self, py_db): + self._py_db = weakref.ref(py_db) + self._next_request_time = partial(next, itertools.count()) + self._last_suspend_notification_time = -1 + self._last_resume_notification_time = -1 + self._suspend_time_request = self._next_request_time() + self._lock = thread.allocate_lock() + self._suspended_thread_ids = set() + self._pause_requested = False + + def send_suspend_notification(self, thread_id, stop_reason): + raise AssertionError('abstract: subclasses must override.') + + def send_resume_notification(self, thread_id): + raise AssertionError('abstract: subclasses must override.') + + def increment_suspend_time(self): + with self._lock: + self._suspend_time_request = self._next_request_time() + + def on_pause(self): + # Upon a pause, we should force sending new suspend notifications + # if no notification is sent after some time and there's some thread already stopped. + with self._lock: + self._pause_requested = True + global_suspend_time = self._suspend_time_request + py_db = self._py_db() + if py_db is not None: + py_db.timeout_tracker.call_on_timeout( + self.NOTIFY_OF_PAUSE_TIMEOUT, + self._notify_after_timeout, + kwargs={'global_suspend_time': global_suspend_time} + ) + + def _notify_after_timeout(self, global_suspend_time): + with self._lock: + if self._suspended_thread_ids: + if global_suspend_time > self._last_suspend_notification_time: + self._last_suspend_notification_time = global_suspend_time + # Notify about any thread which is currently suspended. + pydev_log.info('Sending suspend notification after timeout.') + self.send_suspend_notification(next(iter(self._suspended_thread_ids)), CMD_THREAD_SUSPEND) + + def on_thread_suspend(self, thread_id, stop_reason): + with self._lock: + pause_requested = self._pause_requested + if pause_requested: + # When a suspend notification is sent, reset the pause flag. + self._pause_requested = False + + self._suspended_thread_ids.add(thread_id) + + # CMD_THREAD_SUSPEND should always be a side-effect of a break, so, only + # issue for a CMD_THREAD_SUSPEND if a pause is pending. + if stop_reason != CMD_THREAD_SUSPEND or pause_requested: + if self._suspend_time_request > self._last_suspend_notification_time: + pydev_log.info('Sending suspend notification.') + self._last_suspend_notification_time = self._suspend_time_request + self.send_suspend_notification(thread_id, stop_reason) + else: + pydev_log.info( + 'Suspend not sent (it was already sent). Last suspend % <= Last resume %s', + self._last_suspend_notification_time, + self._last_resume_notification_time, + ) + else: + pydev_log.info( + 'Suspend not sent because stop reason is thread suspend and pause was not requested.', + ) + + def on_thread_resume(self, thread_id): + # on resume (step, continue all): + with self._lock: + self._suspended_thread_ids.remove(thread_id) + if self._last_resume_notification_time < self._last_suspend_notification_time: + pydev_log.info('Sending resume notification.') + self._last_resume_notification_time = self._last_suspend_notification_time + self.send_resume_notification(thread_id) + else: + pydev_log.info( + 'Resume not sent (it was already sent). 
Last resume %s >= Last suspend %s', + self._last_resume_notification_time, + self._last_suspend_notification_time, + ) + + @contextmanager + def notify_thread_suspended(self, thread_id, stop_reason): + self.on_thread_suspend(thread_id, stop_reason) + try: + yield # At this point the thread must be actually suspended. + finally: + self.on_thread_resume(thread_id) + + +class ThreadsSuspendedSingleNotification(AbstractSingleNotificationBehavior): + + __slots__ = AbstractSingleNotificationBehavior.__slots__ + [ + 'multi_threads_single_notification', '_callbacks', '_callbacks_lock'] + + def __init__(self, py_db): + AbstractSingleNotificationBehavior.__init__(self, py_db) + # If True, pydevd will send a single notification when all threads are suspended/resumed. + self.multi_threads_single_notification = False + self._callbacks_lock = threading.Lock() + self._callbacks = [] + + def add_on_resumed_callback(self, callback): + with self._callbacks_lock: + self._callbacks.append(callback) + + @overrides(AbstractSingleNotificationBehavior.send_resume_notification) + def send_resume_notification(self, thread_id): + py_db = self._py_db() + if py_db is not None: + py_db.writer.add_command(py_db.cmd_factory.make_thread_resume_single_notification(thread_id)) + + with self._callbacks_lock: + callbacks = self._callbacks + self._callbacks = [] + + for callback in callbacks: + callback() + + @overrides(AbstractSingleNotificationBehavior.send_suspend_notification) + def send_suspend_notification(self, thread_id, stop_reason): + py_db = self._py_db() + if py_db is not None: + py_db.writer.add_command(py_db.cmd_factory.make_thread_suspend_single_notification(py_db, thread_id, stop_reason)) + + @overrides(AbstractSingleNotificationBehavior.notify_thread_suspended) + @contextmanager + def notify_thread_suspended(self, thread_id, stop_reason): + if self.multi_threads_single_notification: + with AbstractSingleNotificationBehavior.notify_thread_suspended(self, thread_id, stop_reason): + yield + else: + yield + + +class _Authentication(object): + + __slots__ = ['access_token', 'client_access_token', '_authenticated', '_wrong_attempts'] + + def __init__(self): + # A token to be send in the command line or through the settrace api -- when such token + # is given, the first message sent to the IDE must pass the same token to authenticate. + # Note that if a disconnect is sent, the same message must be resent to authenticate. + self.access_token = None + + # This token is the one that the client requires to accept a connection from pydevd + # (it's stored here and just passed back when required, it's not used internally + # for anything else). + self.client_access_token = None + + self._authenticated = None + + self._wrong_attempts = 0 + + def is_authenticated(self): + if self._authenticated is None: + return self.access_token is None + return self._authenticated + + def login(self, access_token): + if self._wrong_attempts >= 10: # A user can fail to authenticate at most 10 times. + return + + self._authenticated = access_token == self.access_token + if not self._authenticated: + self._wrong_attempts += 1 + else: + self._wrong_attempts = 0 + + def logout(self): + self._authenticated = None + self._wrong_attempts = 0 + + +class PyDB(object): + """ Main debugging class + Lots of stuff going on here: + + PyDB starts two threads on startup that connect to remote debugger (RDB) + The threads continuously read & write commands to RDB. + PyDB communicates with these threads through command queues. 
+ Every RDB command is processed by calling process_net_command. + Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue + + Some commands need to be executed on the right thread (suspend/resume & friends) + These are placed on the internal command queue. + """ + + # Direct child pids which should not be terminated when terminating processes. + # Note: class instance because it should outlive PyDB instances. + dont_terminate_child_pids = set() + + def __init__(self, set_as_global=True): + if set_as_global: + pydevd_tracing.replace_sys_set_trace_func() + + self.authentication = _Authentication() + + self.reader = None + self.writer = None + self._fsnotify_thread = None + self.created_pydb_daemon_threads = {} + self._waiting_for_connection_thread = None + self._on_configuration_done_event = threading.Event() + self.check_alive_thread = None + self.py_db_command_thread = None + self.quitting = None + self.cmd_factory = NetCommandFactory() + self._cmd_queue = defaultdict(_queue.Queue) # Key is thread id or '*', value is Queue + self.suspended_frames_manager = SuspendedFramesManager() + self._files_filtering = FilesFiltering() + self.timeout_tracker = TimeoutTracker(self) + + # Note: when the source mapping is changed we also have to clear the file types cache + # (because if a given file is a part of the project or not may depend on it being + # defined in the source mapping). + self.source_mapping = SourceMapping(on_source_mapping_changed=self._clear_filters_caches) + + # Determines whether we should terminate child processes when asked to terminate. + self.terminate_child_processes = True + + # These are the breakpoints received by the PyDevdAPI. They are meant to store + # the breakpoints in the api -- its actual contents are managed by the api. + self.api_received_breakpoints = {} + + # These are the breakpoints meant to be consumed during runtime. + self.breakpoints = {} + self.function_breakpoint_name_to_breakpoint = {} + + # Set communication protocol + PyDevdAPI().set_protocol(self, 0, PydevdCustomization.DEFAULT_PROTOCOL) + + self.variable_presentation = PyDevdAPI.VariablePresentation() + + # mtime to be raised when breakpoints change + self.mtime = 0 + + self.file_to_id_to_line_breakpoint = {} + self.file_to_id_to_plugin_breakpoint = {} + + # Note: breakpoints dict should not be mutated: a copy should be created + # and later it should be assigned back (to prevent concurrency issues). + self.break_on_uncaught_exceptions = {} + self.break_on_caught_exceptions = {} + self.break_on_user_uncaught_exceptions = {} + + self.ready_to_run = False + self._main_lock = thread.allocate_lock() + self._lock_running_thread_ids = thread.allocate_lock() + self._lock_create_fs_notify = thread.allocate_lock() + self._py_db_command_thread_event = threading.Event() + if set_as_global: + CustomFramesContainer._py_db_command_thread_event = self._py_db_command_thread_event + + self.pydb_disposed = False + self._wait_for_threads_to_finish_called = False + self._wait_for_threads_to_finish_called_lock = thread.allocate_lock() + self._wait_for_threads_to_finish_called_event = threading.Event() + + self.terminate_requested = False + self._disposed_lock = thread.allocate_lock() + self.signature_factory = None + self.SetTrace = pydevd_tracing.SetTrace + self.skip_on_exceptions_thrown_in_same_context = False + self.ignore_exceptions_thrown_in_lines_with_ignore_exception = True + + # Suspend debugger even if breakpoint condition raises an exception. 
+ # May be changed with CMD_PYDEVD_JSON_CONFIG. + self.skip_suspend_on_breakpoint_exception = () # By default suspend on any Exception. + self.skip_print_breakpoint_exception = () # By default print on any Exception. + + # By default user can step into properties getter/setter/deleter methods + self.disable_property_trace = False + self.disable_property_getter_trace = False + self.disable_property_setter_trace = False + self.disable_property_deleter_trace = False + + # this is a dict of thread ids pointing to thread ids. Whenever a command is passed to the java end that + # acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not + # find that thread alive anymore, we must remove it from this list and make the java side know that the thread + # was killed. + self._running_thread_ids = {} + # Note: also access '_enable_thread_notifications' with '_lock_running_thread_ids' + self._enable_thread_notifications = False + + self._set_breakpoints_with_id = False + + # This attribute holds the file-> lines which have an @IgnoreException. + self.filename_to_lines_where_exceptions_are_ignored = {} + + # working with plugins (lazily initialized) + self.plugin = None + self.has_plugin_line_breaks = False + self.has_plugin_exception_breaks = False + self.thread_analyser = None + self.asyncio_analyser = None + + # The GUI event loop that's going to run. + # Possible values: + # matplotlib - Whatever GUI backend matplotlib is using. + # 'wx'/'qt'/'none'/... - GUI toolkits that have bulitin support. See pydevd_ipython/inputhook.py:24. + # Other - A custom function that'll be imported and run. + self._gui_event_loop = 'matplotlib' + self._installed_gui_support = False + self.gui_in_use = False + + # GUI event loop support in debugger + self.activate_gui_function = None + + # matplotlib support in debugger and debug console + self.mpl_hooks_in_debug_console = False + self.mpl_modules_for_patching = {} + + self._filename_to_not_in_scope = {} + self.first_breakpoint_reached = False + self._exclude_filters_enabled = self._files_filtering.use_exclude_filters() + self._is_libraries_filter_enabled = self._files_filtering.use_libraries_filter() + self.is_files_filter_enabled = self._exclude_filters_enabled or self._is_libraries_filter_enabled + self.show_return_values = False + self.remove_return_values_flag = False + self.redirect_output = False + # Note that besides the `redirect_output` flag, we also need to consider that someone + # else is already redirecting (i.e.: debugpy). + self.is_output_redirected = False + + # this flag disables frame evaluation even if it's available + self.use_frame_eval = True + + # If True, pydevd will send a single notification when all threads are suspended/resumed. + self._threads_suspended_single_notification = ThreadsSuspendedSingleNotification(self) + + # If True a step command will do a step in one thread and will also resume all other threads. + self.stepping_resumes_all_threads = False + + self._local_thread_trace_func = threading.local() + + self._server_socket_ready_event = threading.Event() + self._server_socket_name = None + + # Bind many locals to the debugger because upon teardown those names may become None + # in the namespace (and thus can't be relied upon unless the reference was previously + # saved). + if IS_IRONPYTHON: + + # A partial() cannot be used in IronPython for sys.settrace. 
+ def new_trace_dispatch(frame, event, arg): + return _trace_dispatch(self, frame, event, arg) + + self.trace_dispatch = new_trace_dispatch + else: + self.trace_dispatch = partial(_trace_dispatch, self) + self.fix_top_level_trace_and_get_trace_func = fix_top_level_trace_and_get_trace_func + self.frame_eval_func = frame_eval_func + self.dummy_trace_dispatch = dummy_trace_dispatch + + # Note: this is different from pydevd_constants.thread_get_ident because we want Jython + # to be None here because it also doesn't have threading._active. + try: + self.threading_get_ident = threading.get_ident # Python 3 + self.threading_active = threading._active + except: + try: + self.threading_get_ident = threading._get_ident # Python 2 noqa + self.threading_active = threading._active + except: + self.threading_get_ident = None # Jython + self.threading_active = None + self.threading_current_thread = threading.currentThread + self.set_additional_thread_info = set_additional_thread_info + self.stop_on_unhandled_exception = stop_on_unhandled_exception + self.collect_return_info = collect_return_info + self.get_exception_breakpoint = get_exception_breakpoint + self._dont_trace_get_file_type = DONT_TRACE.get + self._dont_trace_dirs_get_file_type = DONT_TRACE_DIRS.get + self.PYDEV_FILE = PYDEV_FILE + self.LIB_FILE = LIB_FILE + + self._in_project_scope_cache = {} + self._exclude_by_filter_cache = {} + self._apply_filter_cache = {} + self._ignore_system_exit_codes = set() + + # DAP related + self._dap_messages_listeners = [] + + if set_as_global: + # Set as the global instance only after it's initialized. + set_global_debugger(self) + + # Stop the tracing as the last thing before the actual shutdown for a clean exit. + atexit.register(stoptrace) + + def collect_try_except_info(self, code_obj): + filename = code_obj.co_filename + try: + if os.path.exists(filename): + pydev_log.debug('Collecting try..except info from source for %s', filename) + try_except_infos = collect_try_except_info_from_source(filename) + if try_except_infos: + # Filter for the current function + max_line = -1 + min_line = sys.maxsize + for _, line in dis.findlinestarts(code_obj): + + if line > max_line: + max_line = line + + if line < min_line: + min_line = line + + try_except_infos = [x for x in try_except_infos if min_line <= x.try_line <= max_line] + return try_except_infos + + except: + pydev_log.exception('Error collecting try..except info from source (%s)', filename) + + pydev_log.debug('Collecting try..except info from bytecode for %s', filename) + return collect_try_except_info(code_obj) + + def setup_auto_reload_watcher(self, enable_auto_reload, watch_dirs, poll_target_time, exclude_patterns, include_patterns): + try: + with self._lock_create_fs_notify: + + # When setting up, dispose of the previous one (if any). 
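# Illustration (standalone sketch, not pydevd code): collect_try_except_info above clips the
# source-based results to the current function by taking the min/max line numbers reported by
# dis.findlinestarts for the code object. The helper and sample function below are made up.
import dis

def _line_range(code_obj):
    # Smallest and largest source lines that the bytecode of code_obj maps to.
    lines = [line for _offset, line in dis.findlinestarts(code_obj) if line is not None]
    return min(lines), max(lines)

def _sample():
    a = 1
    b = 2
    return a + b

print(_line_range(_sample.__code__))   # e.g. (11, 13), depending on where _sample is defined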
+ if self._fsnotify_thread is not None: + self._fsnotify_thread.do_kill_pydev_thread() + self._fsnotify_thread = None + + if not enable_auto_reload: + return + + exclude_patterns = tuple(exclude_patterns) + include_patterns = tuple(include_patterns) + + def accept_directory(absolute_filename, cache={}): + try: + return cache[absolute_filename] + except: + if absolute_filename and absolute_filename[-1] not in ('/', '\\'): + # I.e.: for directories we always end with '/' or '\\' so that + # we match exclusions such as "**/node_modules/**" + absolute_filename += os.path.sep + + # First include what we want + for include_pattern in include_patterns: + if glob_matches_path(absolute_filename, include_pattern): + cache[absolute_filename] = True + return True + + # Then exclude what we don't want + for exclude_pattern in exclude_patterns: + if glob_matches_path(absolute_filename, exclude_pattern): + cache[absolute_filename] = False + return False + + # By default track all directories not excluded. + cache[absolute_filename] = True + return True + + def accept_file(absolute_filename, cache={}): + try: + return cache[absolute_filename] + except: + # First include what we want + for include_pattern in include_patterns: + if glob_matches_path(absolute_filename, include_pattern): + cache[absolute_filename] = True + return True + + # Then exclude what we don't want + for exclude_pattern in exclude_patterns: + if glob_matches_path(absolute_filename, exclude_pattern): + cache[absolute_filename] = False + return False + + # By default don't track files not included. + cache[absolute_filename] = False + return False + + self._fsnotify_thread = FSNotifyThread(self, PyDevdAPI(), watch_dirs) + watcher = self._fsnotify_thread.watcher + watcher.accept_directory = accept_directory + watcher.accept_file = accept_file + + watcher.target_time_for_single_scan = poll_target_time + watcher.target_time_for_notification = poll_target_time + self._fsnotify_thread.start() + except: + pydev_log.exception('Error setting up auto-reload.') + + def get_arg_ppid(self): + try: + setup = SetupHolder.setup + if setup: + return int(setup.get('ppid', 0)) + except: + pydev_log.exception('Error getting ppid.') + + return 0 + + def wait_for_ready_to_run(self): + while not self.ready_to_run: + # busy wait until we receive run command + self.process_internal_commands() + self._py_db_command_thread_event.clear() + self._py_db_command_thread_event.wait(0.1) + + def on_initialize(self): + ''' + Note: only called when using the DAP (Debug Adapter Protocol). + ''' + self._on_configuration_done_event.clear() + + def on_configuration_done(self): + ''' + Note: only called when using the DAP (Debug Adapter Protocol). + ''' + self._on_configuration_done_event.set() + self._py_db_command_thread_event.set() + + def is_attached(self): + return self._on_configuration_done_event.is_set() + + def on_disconnect(self): + ''' + Note: only called when using the DAP (Debug Adapter Protocol). 
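# Illustration (standalone sketch, not pydevd code): the accept_directory/accept_file helpers
# above test include patterns first, then exclude patterns, and memoise the verdict per path.
# fnmatch is used here as a stand-in for pydevd's glob_matches_path; the patterns are examples.
from fnmatch import fnmatch

def make_accept(include_patterns, exclude_patterns, default):
    cache = {}

    def accept(path):
        try:
            return cache[path]
        except KeyError:
            for pattern in include_patterns:      # includes win first
                if fnmatch(path, pattern):
                    cache[path] = True
                    return True
            for pattern in exclude_patterns:      # then excludes
                if fnmatch(path, pattern):
                    cache[path] = False
                    return False
            cache[path] = default                 # otherwise fall back to the default
            return default

    return accept

accept_file = make_accept(('*/src/*.py',), ('*/node_modules/*',), default=False)
print(accept_file('/proj/src/app.py'))           # True: matches an include pattern
print(accept_file('/proj/node_modules/x.py'))    # False: matches an exclude pattern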
+ ''' + self.authentication.logout() + self._on_configuration_done_event.clear() + + def set_ignore_system_exit_codes(self, ignore_system_exit_codes): + assert isinstance(ignore_system_exit_codes, (list, tuple, set)) + self._ignore_system_exit_codes = set(ignore_system_exit_codes) + + def ignore_system_exit_code(self, system_exit_exc): + if hasattr(system_exit_exc, 'code'): + return system_exit_exc.code in self._ignore_system_exit_codes + else: + return system_exit_exc in self._ignore_system_exit_codes + + def block_until_configuration_done(self, cancel=None): + if cancel is None: + cancel = NULL + + while not cancel.is_set(): + if self._on_configuration_done_event.is_set(): + cancel.set() # Set cancel to prevent reuse + return + + self.process_internal_commands() + self._py_db_command_thread_event.clear() + self._py_db_command_thread_event.wait(1 / 15.) + + def add_fake_frame(self, thread_id, frame_id, frame): + self.suspended_frames_manager.add_fake_frame(thread_id, frame_id, frame) + + def handle_breakpoint_condition(self, info, pybreakpoint, new_frame): + condition = pybreakpoint.condition + try: + if pybreakpoint.handle_hit_condition(new_frame): + return True + + if not condition: + return False + + return eval(condition, new_frame.f_globals, new_frame.f_locals) + except Exception as e: + if not isinstance(e, self.skip_print_breakpoint_exception): + stack_trace = io.StringIO() + etype, value, tb = sys.exc_info() + traceback.print_exception(etype, value, tb.tb_next, file=stack_trace) + + msg = 'Error while evaluating expression in conditional breakpoint: %s\n%s' % ( + condition, stack_trace.getvalue()) + api = PyDevdAPI() + api.send_error_message(self, msg) + + if not isinstance(e, self.skip_suspend_on_breakpoint_exception): + try: + # add exception_type and stacktrace into thread additional info + etype, value, tb = sys.exc_info() + error = ''.join(traceback.format_exception_only(etype, value)) + stack = traceback.extract_stack(f=tb.tb_frame.f_back) + + # On self.set_suspend(thread, CMD_SET_BREAK) this info will be + # sent to the client. + info.conditional_breakpoint_exception = \ + ('Condition:\n' + condition + '\n\nError:\n' + error, stack) + except: + pydev_log.exception() + return True + + return False + + finally: + etype, value, tb = None, None, None + + def handle_breakpoint_expression(self, pybreakpoint, info, new_frame): + try: + try: + val = eval(pybreakpoint.expression, new_frame.f_globals, new_frame.f_locals) + except: + val = sys.exc_info()[1] + finally: + if val is not None: + info.pydev_message = str(val) + + def _internal_get_file_type(self, abs_real_path_and_basename): + basename = abs_real_path_and_basename[-1] + if ( + basename.startswith(IGNORE_BASENAMES_STARTING_WITH) or + abs_real_path_and_basename[0].startswith(IGNORE_BASENAMES_STARTING_WITH) + ): + # Note: these are the files that are completely ignored (they aren't shown to the user + # as user nor library code as it's usually just noise in the frame stack). + return self.PYDEV_FILE + file_type = self._dont_trace_get_file_type(basename) + if file_type is not None: + return file_type + + if basename.startswith('__init__.py'): + # i.e.: ignore the __init__ files inside pydevd (the other + # files are ignored just by their name). 
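# Illustration (standalone sketch, not pydevd code): handle_breakpoint_condition above decides
# whether to suspend by eval()-ing the user's condition string against the frame's globals and
# locals, and (by default) still suspends if the condition raises. The names below are made up;
# sys._getframe() stands in for the frame the debugger would already have.
import sys

def should_stop(frame, condition):
    try:
        return bool(eval(condition, frame.f_globals, frame.f_locals))
    except Exception:
        return True   # mirror the "suspend on any Exception" default described above

def _work(counter):
    frame = sys._getframe()              # frame in which the "breakpoint" condition is evaluated
    return should_stop(frame, 'counter > 3')

print(_work(1))   # False
print(_work(5))   # True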
+ abs_path = abs_real_path_and_basename[0]
+ i = max(abs_path.rfind('/'), abs_path.rfind('\\'))
+ if i:
+ abs_path = abs_path[0:i]
+ i = max(abs_path.rfind('/'), abs_path.rfind('\\'))
+ if i:
+ dirname = abs_path[i + 1:]
+ # At this point, something as:
+ # "my_path\_pydev_runfiles\__init__.py"
+ # is now "_pydev_runfiles".
+ return self._dont_trace_dirs_get_file_type(dirname)
+ return None
+
+ def dont_trace_external_files(self, abs_path):
+ '''
+ :param abs_path:
+ The result from get_abs_path_real_path_and_base_from_file or
+ get_abs_path_real_path_and_base_from_frame.
+
+ :return
+ True :
+ If files should NOT be traced.
+
+ False:
+ If files should be traced.
+ '''
+ # By default all external files are traced. Note: this function is expected to
+ # be changed for another function in PyDevdAPI.set_dont_trace_start_end_patterns.
+ return False
+
+ def get_file_type(self, frame, abs_real_path_and_basename=None, _cache_file_type=_CACHE_FILE_TYPE):
+ '''
+ :param abs_real_path_and_basename:
+ The result from get_abs_path_real_path_and_base_from_file or
+ get_abs_path_real_path_and_base_from_frame.
+
+ :return
+ _pydevd_bundle.pydevd_dont_trace_files.PYDEV_FILE:
+ If it's a file internal to the debugger which shouldn't be
+ traced nor shown to the user.
+
+ _pydevd_bundle.pydevd_dont_trace_files.LIB_FILE:
+ If it's a file in a library which shouldn't be traced.
+
+ None:
+ If it's a regular user file which should be traced.
+ '''
+ if abs_real_path_and_basename is None:
+ try:
+ # Make fast path faster!
+ abs_real_path_and_basename = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename]
+ except:
+ abs_real_path_and_basename = get_abs_path_real_path_and_base_from_frame(frame)
+
+ # Note 1: we have to take into account that we may have files as '<string>', and that in
+ # this case the cache key can't rely only on the filename. With the current cache, there's
+ # still a potential miss if 2 functions which have exactly the same content are compiled
+ # with '<string>', but in practice as we only separate the one from python -c from the rest
+ # this shouldn't be a problem in practice.
+
+ # Note 2: firstlineno added to make misses faster in the first comparison.
+
+ # Note 3: this cache key is repeated in pydevd_frame_evaluator.pyx:get_func_code_info (for
+ # speedups).
+ cache_key = (frame.f_code.co_firstlineno, abs_real_path_and_basename[0], frame.f_code)
+ try:
+ return _cache_file_type[cache_key]
+ except:
+ if abs_real_path_and_basename[0] == '<string>':
+
+ # Consider it an untraceable file unless there's no back frame (ignoring
+ # internal files and runpy.py).
+ f = frame.f_back
+ while f is not None:
+ if (self.get_file_type(f) != self.PYDEV_FILE and
+ pydevd_file_utils.basename(f.f_code.co_filename) not in ('runpy.py', '<string>')):
+ # We found some back frame that's not internal, which means we must consider
+ # this a library file.
+ # This is done because we only want to trace files as '<string>' if they don't
+ # have any back frame (which is the case for python -c ...), for all other
+ # cases we don't want to trace them because we can't show the source to the
+ # user (at least for now...).
+
+ # Note that we return as a LIB_FILE and not PYDEV_FILE because we still want
+ # to show it in the stack.
+ _cache_file_type[cache_key] = LIB_FILE
+ return LIB_FILE
+ f = f.f_back
+ else:
+ # This is a top-level file (used in python -c), so, trace it as usual... we
+ # still won't be able to show the sources, but some tests require this to work.
+ _cache_file_type[cache_key] = None + return None + + file_type = self._internal_get_file_type(abs_real_path_and_basename) + if file_type is None: + if self.dont_trace_external_files(abs_real_path_and_basename[0]): + file_type = PYDEV_FILE + + _cache_file_type[cache_key] = file_type + return file_type + + def is_cache_file_type_empty(self): + return not _CACHE_FILE_TYPE + + def get_cache_file_type(self, _cache=_CACHE_FILE_TYPE): # i.e.: Make it local. + return _cache + + def get_thread_local_trace_func(self): + try: + thread_trace_func = self._local_thread_trace_func.thread_trace_func + except AttributeError: + thread_trace_func = self.trace_dispatch + return thread_trace_func + + def enable_tracing(self, thread_trace_func=None, apply_to_all_threads=False): + ''' + Enables tracing. + + If in regular mode (tracing), will set the tracing function to the tracing + function for this thread -- by default it's `PyDB.trace_dispatch`, but after + `PyDB.enable_tracing` is called with a `thread_trace_func`, the given function will + be the default for the given thread. + + :param bool apply_to_all_threads: + If True we'll set the tracing function in all threads, not only in the current thread. + If False only the tracing for the current function should be changed. + In general apply_to_all_threads should only be true if this is the first time + this function is called on a multi-threaded program (either programmatically or attach + to pid). + ''' + if pydevd_gevent_integration is not None: + pydevd_gevent_integration.enable_gevent_integration() + + if self.frame_eval_func is not None: + self.frame_eval_func() + pydevd_tracing.SetTrace(self.dummy_trace_dispatch) + + if IS_CPYTHON and apply_to_all_threads: + pydevd_tracing.set_trace_to_threads(self.dummy_trace_dispatch) + return + + if apply_to_all_threads: + # If applying to all threads, don't use the local thread trace function. + assert thread_trace_func is not None + else: + if thread_trace_func is None: + thread_trace_func = self.get_thread_local_trace_func() + else: + self._local_thread_trace_func.thread_trace_func = thread_trace_func + + pydevd_tracing.SetTrace(thread_trace_func) + if IS_CPYTHON and apply_to_all_threads: + pydevd_tracing.set_trace_to_threads(thread_trace_func) + + def disable_tracing(self): + pydevd_tracing.SetTrace(None) + + def on_breakpoints_changed(self, removed=False): + ''' + When breakpoints change, we have to re-evaluate all the assumptions we've made so far. + ''' + if not self.ready_to_run: + # No need to do anything if we're still not running. + return + + self.mtime += 1 + if not removed: + # When removing breakpoints we can leave tracing as was, but if a breakpoint was added + # we have to reset the tracing for the existing functions to be re-evaluated. + self.set_tracing_for_untraced_contexts() + + def set_tracing_for_untraced_contexts(self): + # Enable the tracing for existing threads (because there may be frames being executed that + # are currently untraced). + + if IS_CPYTHON: + # Note: use sys._current_frames instead of threading.enumerate() because this way + # we also see C/C++ threads, not only the ones visible to the threading module. 
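# Illustration (standalone sketch, not pydevd code): the loop continued below iterates
# sys._current_frames(), which maps every thread id to its topmost frame and, unlike
# threading.enumerate(), also covers threads the threading module does not know about.
# The worker thread here is only a made-up example to have a second entry in the mapping.
import sys
import threading
import time

def _worker():
    time.sleep(0.5)

t = threading.Thread(target=_worker)
t.start()

for thread_id, frame in sys._current_frames().items():
    print(thread_id, frame.f_code.co_name, frame.f_code.co_filename)

t.join()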
+ tid_to_frame = sys._current_frames() + + ignore_thread_ids = set( + t.ident for t in threadingEnumerate() + if getattr(t, 'is_pydev_daemon_thread', False) or getattr(t, 'pydev_do_not_trace', False) + ) + + for thread_id, frame in tid_to_frame.items(): + if thread_id not in ignore_thread_ids: + self.set_trace_for_frame_and_parents(frame) + + else: + try: + threads = threadingEnumerate() + for t in threads: + if getattr(t, 'is_pydev_daemon_thread', False) or getattr(t, 'pydev_do_not_trace', False): + continue + + additional_info = set_additional_thread_info(t) + frame = additional_info.get_topmost_frame(t) + try: + if frame is not None: + self.set_trace_for_frame_and_parents(frame) + finally: + frame = None + finally: + frame = None + t = None + threads = None + additional_info = None + + @property + def multi_threads_single_notification(self): + return self._threads_suspended_single_notification.multi_threads_single_notification + + @multi_threads_single_notification.setter + def multi_threads_single_notification(self, notify): + self._threads_suspended_single_notification.multi_threads_single_notification = notify + + @property + def threads_suspended_single_notification(self): + return self._threads_suspended_single_notification + + def get_plugin_lazy_init(self): + if self.plugin is None: + self.plugin = PluginManager(self) + return self.plugin + + def in_project_scope(self, frame, absolute_filename=None): + ''' + Note: in general this method should not be used (apply_files_filter should be used + in most cases as it also handles the project scope check). + + :param frame: + The frame we want to check. + + :param absolute_filename: + Must be the result from get_abs_path_real_path_and_base_from_frame(frame)[0] (can + be used to speed this function a bit if it's already available to the caller, but + in general it's not needed). + ''' + try: + if absolute_filename is None: + try: + # Make fast path faster! + abs_real_path_and_basename = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_real_path_and_basename = get_abs_path_real_path_and_base_from_frame(frame) + + absolute_filename = abs_real_path_and_basename[0] + + cache_key = (frame.f_code.co_firstlineno, absolute_filename, frame.f_code) + + return self._in_project_scope_cache[cache_key] + except KeyError: + cache = self._in_project_scope_cache + try: + abs_real_path_and_basename # If we've gotten it previously, use it again. + except NameError: + abs_real_path_and_basename = get_abs_path_real_path_and_base_from_frame(frame) + + # pydevd files are never considered to be in the project scope. 
+ file_type = self.get_file_type(frame, abs_real_path_and_basename)
+ if file_type == self.PYDEV_FILE:
+ cache[cache_key] = False
+
+ elif absolute_filename == '<string>':
+ # Special handling for '<string>'
+ if file_type == self.LIB_FILE:
+ cache[cache_key] = False
+ else:
+ cache[cache_key] = True
+
+ elif self.source_mapping.has_mapping_entry(absolute_filename):
+ cache[cache_key] = True
+
+ else:
+ cache[cache_key] = self._files_filtering.in_project_roots(absolute_filename)
+
+ return cache[cache_key]
+
+ def in_project_roots_filename_uncached(self, absolute_filename):
+ return self._files_filtering.in_project_roots(absolute_filename)
+
+ def _clear_filters_caches(self):
+ self._in_project_scope_cache.clear()
+ self._exclude_by_filter_cache.clear()
+ self._apply_filter_cache.clear()
+ self._exclude_filters_enabled = self._files_filtering.use_exclude_filters()
+ self._is_libraries_filter_enabled = self._files_filtering.use_libraries_filter()
+ self.is_files_filter_enabled = self._exclude_filters_enabled or self._is_libraries_filter_enabled
+
+ def clear_dont_trace_start_end_patterns_caches(self):
+ # When start/end patterns are changed we must clear all caches which would be
+ # affected by a change in get_file_type() and reset the tracing function
+ # as places which were traced may no longer need to be traced and vice-versa.
+ self.on_breakpoints_changed()
+ _CACHE_FILE_TYPE.clear()
+ self._clear_filters_caches()
+ self._clear_skip_caches()
+
+ def _exclude_by_filter(self, frame, absolute_filename):
+ '''
+ :return: True if it should be excluded, False if it should be included and None
+ if no rule matched the given file.
+
+ :note: it'll be normalized as needed inside of this method.
+ '''
+ cache_key = (absolute_filename, frame.f_code.co_name, frame.f_code.co_firstlineno)
+ try:
+ return self._exclude_by_filter_cache[cache_key]
+ except KeyError:
+ cache = self._exclude_by_filter_cache
+
+ # pydevd files are always filtered out
+ if self.get_file_type(frame) == self.PYDEV_FILE:
+ cache[cache_key] = True
+ else:
+ module_name = None
+ if self._files_filtering.require_module:
+ module_name = frame.f_globals.get('__name__', '')
+ cache[cache_key] = self._files_filtering.exclude_by_filter(absolute_filename, module_name)
+
+ return cache[cache_key]
+
+ def apply_files_filter(self, frame, original_filename, force_check_project_scope):
+ '''
+ Should only be called if `self.is_files_filter_enabled == True` or `force_check_project_scope == True`.
+
+ Note that it covers both the filter by specific paths includes/excludes as well
+ as the check which filters out libraries if not in the project scope.
+
+ :param original_filename:
+ Note can either be the original filename or the absolute version of that filename.
+
+ :param force_check_project_scope:
+ Check that the file is in the project scope even if the global setting
+ is off.
+
+ :return bool:
+ True if it should be excluded when stepping and False if it should be
+ included.
+ '''
+ cache_key = (frame.f_code.co_firstlineno, original_filename, force_check_project_scope, frame.f_code)
+ try:
+ return self._apply_filter_cache[cache_key]
+ except KeyError:
+ if self.plugin is not None and (self.has_plugin_line_breaks or self.has_plugin_exception_breaks):
+ # If it's explicitly needed by some plugin, we can't skip it.
+ if not self.plugin.can_skip(self, frame): + pydev_log.debug_once('File traced (included by plugins): %s', original_filename) + self._apply_filter_cache[cache_key] = False + return False + + if self._exclude_filters_enabled: + absolute_filename = pydevd_file_utils.absolute_path(original_filename) + exclude_by_filter = self._exclude_by_filter(frame, absolute_filename) + if exclude_by_filter is not None: + if exclude_by_filter: + # ignore files matching stepping filters + pydev_log.debug_once('File not traced (excluded by filters): %s', original_filename) + + self._apply_filter_cache[cache_key] = True + return True + else: + pydev_log.debug_once('File traced (explicitly included by filters): %s', original_filename) + + self._apply_filter_cache[cache_key] = False + return False + + if (self._is_libraries_filter_enabled or force_check_project_scope) and not self.in_project_scope(frame): + # ignore library files while stepping + self._apply_filter_cache[cache_key] = True + if force_check_project_scope: + pydev_log.debug_once('File not traced (not in project): %s', original_filename) + else: + pydev_log.debug_once('File not traced (not in project - force_check_project_scope): %s', original_filename) + + return True + + if force_check_project_scope: + pydev_log.debug_once('File traced: %s (force_check_project_scope)', original_filename) + else: + pydev_log.debug_once('File traced: %s', original_filename) + self._apply_filter_cache[cache_key] = False + return False + + def exclude_exception_by_filter(self, exception_breakpoint, trace): + if not exception_breakpoint.ignore_libraries and not self._exclude_filters_enabled: + return False + + if trace is None: + return True + + ignore_libraries = exception_breakpoint.ignore_libraries + exclude_filters_enabled = self._exclude_filters_enabled + + if (ignore_libraries and not self.in_project_scope(trace.tb_frame)) \ + or (exclude_filters_enabled and self._exclude_by_filter( + trace.tb_frame, + pydevd_file_utils.absolute_path(trace.tb_frame.f_code.co_filename))): + return True + + return False + + def set_project_roots(self, project_roots): + self._files_filtering.set_project_roots(project_roots) + self._clear_skip_caches() + self._clear_filters_caches() + + def set_exclude_filters(self, exclude_filters): + self._files_filtering.set_exclude_filters(exclude_filters) + self._clear_skip_caches() + self._clear_filters_caches() + + def set_use_libraries_filter(self, use_libraries_filter): + self._files_filtering.set_use_libraries_filter(use_libraries_filter) + self._clear_skip_caches() + self._clear_filters_caches() + + def get_use_libraries_filter(self): + return self._files_filtering.use_libraries_filter() + + def get_require_module_for_filters(self): + return self._files_filtering.require_module + + def has_user_threads_alive(self): + for t in pydevd_utils.get_non_pydevd_threads(): + if isinstance(t, PyDBDaemonThread): + pydev_log.error_once( + 'Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n') + + if is_thread_alive(t): + if not t.daemon or hasattr(t, "__pydevd_main_thread"): + return True + + return False + + def initialize_network(self, sock, terminate_on_socket_close=True): + assert sock is not None + try: + sock.settimeout(None) # infinite, no timeouts from now on - jython does not have it + except: + pass + curr_reader = getattr(self, 'reader', None) + curr_writer = getattr(self, 'writer', None) + if curr_reader: + curr_reader.do_kill_pydev_thread() + if curr_writer: + curr_writer.do_kill_pydev_thread() + + 
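# Illustration (standalone sketch, not pydevd code): apply_files_filter above checks, in order,
# plugin requirements, the explicit exclude/include filters and finally the library/project-scope
# filter, caching the verdict per code object. The function below compresses that decision order
# into a toy prefix-based version; all paths and filters are illustrative.
def should_skip_file(filename, plugin_needs=(), excludes=(), project_roots=('/proj',)):
    if filename in plugin_needs:                                    # 1) plugins always win
        return False
    if any(filename.startswith(prefix) for prefix in excludes):    # 2) explicit excludes next
        return True
    return not any(filename.startswith(root) for root in project_roots)  # 3) library filter

print(should_skip_file('/proj/app.py'))                                  # False: project code
print(should_skip_file('/usr/lib/python3.8/json/__init__.py'))           # True: library code
print(should_skip_file('/proj/gen.py', excludes=('/proj/gen',)))         # True: excluded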
self.writer = WriterThread(sock, self, terminate_on_socket_close=terminate_on_socket_close) + self.reader = ReaderThread( + sock, + self, + PyDevJsonCommandProcessor=PyDevJsonCommandProcessor, + process_net_command=process_net_command, + terminate_on_socket_close=terminate_on_socket_close + ) + self.writer.start() + self.reader.start() + + time.sleep(0.1) # give threads time to start + + def connect(self, host, port): + if host: + s = start_client(host, port) + else: + s = start_server(port) + + self.initialize_network(s) + + def create_wait_for_connection_thread(self): + if self._waiting_for_connection_thread is not None: + raise AssertionError('There is already another thread waiting for a connection.') + + self._server_socket_ready_event.clear() + self._waiting_for_connection_thread = self._WaitForConnectionThread(self) + self._waiting_for_connection_thread.start() + + def set_server_socket_ready(self): + self._server_socket_ready_event.set() + + def wait_for_server_socket_ready(self): + self._server_socket_ready_event.wait() + + @property + def dap_messages_listeners(self): + return self._dap_messages_listeners + + def add_dap_messages_listener(self, listener): + self._dap_messages_listeners.append(listener) + + class _WaitForConnectionThread(PyDBDaemonThread): + + def __init__(self, py_db): + PyDBDaemonThread.__init__(self, py_db) + self._server_socket = None + + def run(self): + host = SetupHolder.setup['client'] + port = SetupHolder.setup['port'] + + self._server_socket = create_server_socket(host=host, port=port) + self.py_db._server_socket_name = self._server_socket.getsockname() + self.py_db.set_server_socket_ready() + + while not self._kill_received: + try: + s = self._server_socket + if s is None: + return + + s.listen(1) + new_socket, _addr = s.accept() + if self._kill_received: + pydev_log.info("Connection (from wait_for_attach) accepted but ignored as kill was already received.") + return + + pydev_log.info("Connection (from wait_for_attach) accepted.") + reader = getattr(self.py_db, 'reader', None) + if reader is not None: + # This is needed if a new connection is done without the client properly + # sending a disconnect for the previous connection. + api = PyDevdAPI() + api.request_disconnect(self.py_db, resume_threads=False) + + self.py_db.initialize_network(new_socket, terminate_on_socket_close=False) + + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 0: + pydev_log.exception() + pydev_log.debug("Exiting _WaitForConnectionThread: %s\n", port) + + def do_kill_pydev_thread(self): + PyDBDaemonThread.do_kill_pydev_thread(self) + s = self._server_socket + if s is not None: + try: + s.close() + except: + pass + self._server_socket = None + + def get_internal_queue(self, thread_id): + """ returns internal command queue for a given thread. + if new queue is created, notify the RDB about it """ + if thread_id.startswith('__frame__'): + thread_id = thread_id[thread_id.rfind('|') + 1:] + return self._cmd_queue[thread_id] + + def post_method_as_internal_command(self, thread_id, method, *args, **kwargs): + if thread_id == '*': + internal_cmd = InternalThreadCommandForAnyThread(thread_id, method, *args, **kwargs) + else: + internal_cmd = InternalThreadCommand(thread_id, method, *args, **kwargs) + self.post_internal_command(internal_cmd, thread_id) + if thread_id == '*': + # Notify so that the command is handled as soon as possible. 
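# Illustration (standalone sketch, not pydevd code): get_internal_queue/post_internal_command
# above keep one Queue per thread id, created lazily via defaultdict, plus a '*' queue for
# commands that any thread may execute. The string payloads below are made-up stand-ins for
# the InternalThreadCommand objects used by pydevd.
import queue
from collections import defaultdict

cmd_queues = defaultdict(queue.Queue)   # key: thread id, or '*' for "any thread"

def post(thread_id, command):
    cmd_queues[thread_id].put(command)

def drain(thread_id):
    # A thread drains its own queue plus the shared '*' queue.
    pending = []
    for key in (thread_id, '*'):
        q = cmd_queues[key]
        while True:
            try:
                pending.append(q.get(False))
            except queue.Empty:
                break
    return pending

post('thread-1', 'step_over')
post('*', 'suspend_all')
print(drain('thread-1'))   # ['step_over', 'suspend_all']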
+ self._py_db_command_thread_event.set() + + def post_internal_command(self, int_cmd, thread_id): + """ if thread_id is *, post to the '*' queue""" + queue = self.get_internal_queue(thread_id) + queue.put(int_cmd) + + def enable_output_redirection(self, redirect_stdout, redirect_stderr): + global _global_redirect_stdout_to_server + global _global_redirect_stderr_to_server + + _global_redirect_stdout_to_server = redirect_stdout + _global_redirect_stderr_to_server = redirect_stderr + self.redirect_output = redirect_stdout or redirect_stderr + if _global_redirect_stdout_to_server: + _init_stdout_redirect() + if _global_redirect_stderr_to_server: + _init_stderr_redirect() + + def check_output_redirect(self): + global _global_redirect_stdout_to_server + global _global_redirect_stderr_to_server + + if _global_redirect_stdout_to_server: + _init_stdout_redirect() + + if _global_redirect_stderr_to_server: + _init_stderr_redirect() + + def init_matplotlib_in_debug_console(self): + # import hook and patches for matplotlib support in debug console + from _pydev_bundle.pydev_import_hook import import_hook_manager + if is_current_thread_main_thread(): + for module in list(self.mpl_modules_for_patching): + import_hook_manager.add_module_name(module, self.mpl_modules_for_patching.pop(module)) + + def init_gui_support(self): + if self._installed_gui_support: + return + self._installed_gui_support = True + + # enable_gui and enable_gui_function in activate_matplotlib should be called in main thread. Unlike integrated console, + # in the debug console we have no interpreter instance with exec_queue, but we run this code in the main + # thread and can call it directly. + class _ReturnGuiLoopControlHelper: + _return_control_osc = False + + def return_control(): + # Some of the input hooks (e.g. Qt4Agg) check return control without doing + # a single operation, so we don't return True on every + # call when the debug hook is in place to allow the GUI to run + _ReturnGuiLoopControlHelper._return_control_osc = not _ReturnGuiLoopControlHelper._return_control_osc + return _ReturnGuiLoopControlHelper._return_control_osc + + from pydev_ipython.inputhook import set_return_control_callback, enable_gui + + set_return_control_callback(return_control) + + if self._gui_event_loop == 'matplotlib': + # prepare debugger for matplotlib integration with GUI event loop + from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot, do_enable_gui + + self.mpl_modules_for_patching = {"matplotlib": lambda: activate_matplotlib(do_enable_gui), + "matplotlib.pyplot": activate_pyplot, + "pylab": activate_pylab } + else: + self.activate_gui_function = enable_gui + + def _activate_gui_if_needed(self): + if self.gui_in_use: + return + + if len(self.mpl_modules_for_patching) > 0: + if is_current_thread_main_thread(): # Note that we call only in the main thread. + for module in list(self.mpl_modules_for_patching): + if module in sys.modules: + activate_function = self.mpl_modules_for_patching.pop(module, None) + if activate_function is not None: + activate_function() + self.gui_in_use = True + + if self.activate_gui_function: + if is_current_thread_main_thread(): # Only call enable_gui in the main thread. + try: + # First try to activate builtin GUI event loops. + self.activate_gui_function(self._gui_event_loop) + self.activate_gui_function = None + self.gui_in_use = True + except ValueError: + # The user requested a custom GUI event loop, try to import it. 
+ from pydev_ipython.inputhook import set_inputhook + try: + inputhook_function = import_attr_from_module(self._gui_event_loop) + set_inputhook(inputhook_function) + self.gui_in_use = True + except Exception as e: + pydev_log.debug("Cannot activate custom GUI event loop {}: {}".format(self._gui_event_loop, e)) + finally: + self.activate_gui_function = None + + def _call_input_hook(self): + try: + from pydev_ipython.inputhook import get_inputhook + inputhook = get_inputhook() + if inputhook: + inputhook() + except: + pass + + def notify_skipped_step_in_because_of_filters(self, frame): + self.writer.add_command(self.cmd_factory.make_skipped_step_in_because_of_filters(self, frame)) + + def notify_thread_created(self, thread_id, thread, use_lock=True): + if self.writer is None: + # Protect about threads being created before the communication structure is in place + # (note that they will appear later on anyways as pydevd does reconcile live/dead threads + # when processing internal commands, albeit it may take longer and in general this should + # not be usual as it's expected that the debugger is live before other threads are created). + return + + with self._lock_running_thread_ids if use_lock else NULL: + if not self._enable_thread_notifications: + return + + if thread_id in self._running_thread_ids: + return + + additional_info = set_additional_thread_info(thread) + if additional_info.pydev_notify_kill: + # After we notify it should be killed, make sure we don't notify it's alive (on a racing condition + # this could happen as we may notify before the thread is stopped internally). + return + + self._running_thread_ids[thread_id] = thread + + self.writer.add_command(self.cmd_factory.make_thread_created_message(thread)) + + def notify_thread_not_alive(self, thread_id, use_lock=True): + """ if thread is not alive, cancel trace_dispatch processing """ + if self.writer is None: + return + + with self._lock_running_thread_ids if use_lock else NULL: + if not self._enable_thread_notifications: + return + + thread = self._running_thread_ids.pop(thread_id, None) + if thread is None: + return + + additional_info = set_additional_thread_info(thread) + was_notified = additional_info.pydev_notify_kill + if not was_notified: + additional_info.pydev_notify_kill = True + + self.writer.add_command(self.cmd_factory.make_thread_killed_message(thread_id)) + + def set_enable_thread_notifications(self, enable): + with self._lock_running_thread_ids: + if self._enable_thread_notifications != enable: + self._enable_thread_notifications = enable + + if enable: + # As it was previously disabled, we have to notify about existing threads again + # (so, clear the cache related to that). + self._running_thread_ids = {} + + def process_internal_commands(self): + ''' + This function processes internal commands. + ''' + # If this method is being called before the debugger is ready to run we should not notify + # about threads and should only process commands sent to all threads. 
+ ready_to_run = self.ready_to_run + + dispose = False + with self._main_lock: + program_threads_alive = {} + if ready_to_run: + self.check_output_redirect() + + all_threads = threadingEnumerate() + program_threads_dead = [] + with self._lock_running_thread_ids: + reset_cache = not self._running_thread_ids + + for t in all_threads: + if getattr(t, 'is_pydev_daemon_thread', False): + pass # I.e.: skip the DummyThreads created from pydev daemon threads + elif isinstance(t, PyDBDaemonThread): + pydev_log.error_once('Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.') + + elif is_thread_alive(t): + if reset_cache: + # Fix multiprocessing debug with breakpoints in both main and child processes + # (https://youtrack.jetbrains.com/issue/PY-17092) When the new process is created, the main + # thread in the new process already has the attribute 'pydevd_id', so the new thread doesn't + # get new id with its process number and the debugger loses access to both threads. + # Therefore we should update thread_id for every main thread in the new process. + clear_cached_thread_id(t) + + thread_id = get_thread_id(t) + program_threads_alive[thread_id] = t + + self.notify_thread_created(thread_id, t, use_lock=False) + + # Compute and notify about threads which are no longer alive. + thread_ids = list(self._running_thread_ids.keys()) + for thread_id in thread_ids: + if thread_id not in program_threads_alive: + program_threads_dead.append(thread_id) + + for thread_id in program_threads_dead: + self.notify_thread_not_alive(thread_id, use_lock=False) + + cmds_to_execute = [] + + # Without self._lock_running_thread_ids + if len(program_threads_alive) == 0 and ready_to_run: + dispose = True + else: + # Actually process the commands now (make sure we don't have a lock for _lock_running_thread_ids + # acquired at this point as it could lead to a deadlock if some command evaluated tried to + # create a thread and wait for it -- which would try to notify about it getting that lock). + curr_thread_id = get_current_thread_id(threadingCurrentThread()) + if ready_to_run: + process_thread_ids = (curr_thread_id, '*') + else: + process_thread_ids = ('*',) + + for thread_id in process_thread_ids: + queue = self.get_internal_queue(thread_id) + + # some commands must be processed by the thread itself... if that's the case, + # we will re-add the commands to the queue after executing. + cmds_to_add_back = [] + + try: + while True: + int_cmd = queue.get(False) + + if not self.mpl_hooks_in_debug_console and isinstance(int_cmd, InternalConsoleExec) and not self.gui_in_use: + # add import hooks for matplotlib patches if only debug console was started + try: + self.init_matplotlib_in_debug_console() + self.gui_in_use = True + except: + pydev_log.debug("Matplotlib support in debug console failed", traceback.format_exc()) + self.mpl_hooks_in_debug_console = True + + if int_cmd.can_be_executed_by(curr_thread_id): + cmds_to_execute.append(int_cmd) + else: + pydev_log.verbose("NOT processing internal command: %s ", int_cmd) + cmds_to_add_back.append(int_cmd) + + except _queue.Empty: # @UndefinedVariable + # this is how we exit + for int_cmd in cmds_to_add_back: + queue.put(int_cmd) + + if dispose: + # Note: must be called without the main lock to avoid deadlocks. + self.dispose_and_kill_all_pydevd_threads() + else: + # Actually execute the commands without the main lock! 
+ for int_cmd in cmds_to_execute: + pydev_log.verbose("processing internal command: %s", int_cmd) + try: + int_cmd.do_it(self) + except: + pydev_log.exception('Error processing internal command.') + + def consolidate_breakpoints(self, canonical_normalized_filename, id_to_breakpoint, file_to_line_to_breakpoints): + break_dict = {} + for _breakpoint_id, pybreakpoint in id_to_breakpoint.items(): + break_dict[pybreakpoint.line] = pybreakpoint + + file_to_line_to_breakpoints[canonical_normalized_filename] = break_dict + self._clear_skip_caches() + + def _clear_skip_caches(self): + global_cache_skips.clear() + global_cache_frame_skips.clear() + + def add_break_on_exception( + self, + exception, + condition, + expression, + notify_on_handled_exceptions, + notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions, + notify_on_first_raise_only, + ignore_libraries=False + ): + try: + eb = ExceptionBreakpoint( + exception, + condition, + expression, + notify_on_handled_exceptions, + notify_on_unhandled_exceptions, + notify_on_user_unhandled_exceptions, + notify_on_first_raise_only, + ignore_libraries + ) + except ImportError: + pydev_log.critical("Error unable to add break on exception for: %s (exception could not be imported).", exception) + return None + + if eb.notify_on_unhandled_exceptions: + cp = self.break_on_uncaught_exceptions.copy() + cp[exception] = eb + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.critical("Exceptions to hook on terminate: %s.", cp) + self.break_on_uncaught_exceptions = cp + + if eb.notify_on_handled_exceptions: + cp = self.break_on_caught_exceptions.copy() + cp[exception] = eb + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.critical("Exceptions to hook always: %s.", cp) + self.break_on_caught_exceptions = cp + + if eb.notify_on_user_unhandled_exceptions: + cp = self.break_on_user_uncaught_exceptions.copy() + cp[exception] = eb + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.critical("Exceptions to hook on user uncaught code: %s.", cp) + self.break_on_user_uncaught_exceptions = cp + + return eb + + def set_suspend(self, thread, stop_reason, suspend_other_threads=False, is_pause=False, original_step_cmd=-1): + ''' + :param thread: + The thread which should be suspended. + + :param stop_reason: + Reason why the thread was suspended. + + :param suspend_other_threads: + Whether to force other threads to be suspended (i.e.: when hitting a breakpoint + with a suspend all threads policy). + + :param is_pause: + If this is a pause to suspend all threads, any thread can be considered as the 'main' + thread paused. + + :param original_step_cmd: + If given we may change the stop reason to this. + ''' + self._threads_suspended_single_notification.increment_suspend_time() + if is_pause: + self._threads_suspended_single_notification.on_pause() + + info = mark_thread_suspended(thread, stop_reason, original_step_cmd=original_step_cmd) + + if is_pause: + # Must set tracing after setting the state to suspend. + frame = info.get_topmost_frame(thread) + if frame is not None: + try: + self.set_trace_for_frame_and_parents(frame) + finally: + frame = None + + # If conditional breakpoint raises any exception during evaluation send the details to the client. 
+ if stop_reason == CMD_SET_BREAK and info.conditional_breakpoint_exception is not None: + conditional_breakpoint_exception_tuple = info.conditional_breakpoint_exception + info.conditional_breakpoint_exception = None + self._send_breakpoint_condition_exception(thread, conditional_breakpoint_exception_tuple) + + if not suspend_other_threads and self.multi_threads_single_notification: + # In the mode which gives a single notification when all threads are + # stopped, stop all threads whenever a set_suspend is issued. + suspend_other_threads = True + + if suspend_other_threads: + # Suspend all except the current one (which we're currently suspending already). + suspend_all_threads(self, except_thread=thread) + + def _send_breakpoint_condition_exception(self, thread, conditional_breakpoint_exception_tuple): + """If conditional breakpoint raises an exception during evaluation + send exception details to java + """ + thread_id = get_thread_id(thread) + # conditional_breakpoint_exception_tuple - should contain 2 values (exception_type, stacktrace) + if conditional_breakpoint_exception_tuple and len(conditional_breakpoint_exception_tuple) == 2: + exc_type, stacktrace = conditional_breakpoint_exception_tuple + int_cmd = InternalGetBreakpointException(thread_id, exc_type, stacktrace) + self.post_internal_command(int_cmd, thread_id) + + def send_caught_exception_stack(self, thread, arg, curr_frame_id): + """Sends details on the exception which was caught (and where we stopped) to the java side. + + arg is: exception type, description, traceback object + """ + thread_id = get_thread_id(thread) + int_cmd = InternalSendCurrExceptionTrace(thread_id, arg, curr_frame_id) + self.post_internal_command(int_cmd, thread_id) + + def send_caught_exception_stack_proceeded(self, thread): + """Sends that some thread was resumed and is no longer showing an exception trace. + """ + thread_id = get_thread_id(thread) + int_cmd = InternalSendCurrExceptionTraceProceeded(thread_id) + self.post_internal_command(int_cmd, thread_id) + self.process_internal_commands() + + def send_process_created_message(self): + """Sends a message that a new process has been created. + """ + if self.writer is None or self.cmd_factory is None: + return + cmd = self.cmd_factory.make_process_created_message() + self.writer.add_command(cmd) + + def send_process_about_to_be_replaced(self): + """Sends a message that a new process has been created. + """ + if self.writer is None or self.cmd_factory is None: + return + cmd = self.cmd_factory.make_process_about_to_be_replaced_message() + if cmd is NULL_NET_COMMAND: + return + + sent = [False] + + def after_sent(*args, **kwargs): + sent[0] = True + + cmd.call_after_send(after_sent) + self.writer.add_command(cmd) + + timeout = 5 # Wait up to 5 seconds + initial_time = time.time() + while not sent[0]: + time.sleep(.05) + + if (time.time() - initial_time) > timeout: + pydev_log.critical('pydevd: Sending message related to process being replaced timed-out after %s seconds', timeout) + break + + def set_next_statement(self, frame, event, func_name, next_line): + stop = False + response_msg = "" + old_line = frame.f_lineno + if event == 'line' or event == 'exception': + # If we're already in the correct context, we have to stop it now, because we can act only on + # line events -- if a return was the next statement it wouldn't work (so, we have this code + # repeated at pydevd_frame). 
+ + curr_func_name = frame.f_code.co_name + + # global context is set with an empty name + if curr_func_name in ('?', ''): + curr_func_name = '' + + if func_name == '*' or curr_func_name == func_name: + line = next_line + frame.f_trace = self.trace_dispatch + frame.f_lineno = line + stop = True + else: + response_msg = "jump is available only within the bottom frame" + return stop, old_line, response_msg + + def cancel_async_evaluation(self, thread_id, frame_id): + with self._main_lock: + try: + all_threads = threadingEnumerate() + for t in all_threads: + if getattr(t, 'is_pydev_daemon_thread', False) and hasattr(t, 'cancel_event') and t.thread_id == thread_id and \ + t.frame_id == frame_id: + t.cancel_event.set() + except: + pydev_log.exception() + + def find_frame(self, thread_id, frame_id): + """ returns a frame on the thread that has a given frame_id """ + return self.suspended_frames_manager.find_frame(thread_id, frame_id) + + def do_wait_suspend(self, thread, frame, event, arg, exception_type=None): # @UnusedVariable + """ busy waits until the thread state changes to RUN + it expects thread's state as attributes of the thread. + Upon running, processes any outstanding Stepping commands. + + :param exception_type: + If pausing due to an exception, its type. + """ + if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP: + constructed_tid_to_last_frame[thread.ident] = sys._getframe() + self.process_internal_commands() + + thread_id = get_current_thread_id(thread) + + # print('do_wait_suspend %s %s %s %s %s %s (%s)' % (frame.f_lineno, frame.f_code.co_name, frame.f_code.co_filename, event, arg, constant_to_str(thread.additional_info.pydev_step_cmd), constant_to_str(thread.additional_info.pydev_original_step_cmd))) + # print('--- stack ---') + # print(traceback.print_stack(file=sys.stdout)) + # print('--- end stack ---') + + # Send the suspend message + message = thread.additional_info.pydev_message + suspend_type = thread.additional_info.trace_suspend_type + thread.additional_info.trace_suspend_type = 'trace' # Reset to trace mode for next call. 
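set_next_statement() above implements run-to-line / set-next-statement by assigning to frame.f_lineno, which CPython only permits from a trace function that is handling a 'line' event. A minimal, self-contained demonstration of that mechanism (the tracer and target function names are illustrative, not pydevd's):

import sys

def _jump_tracer(frame, event, arg):
    # f_lineno may only be assigned from a trace function handling a 'line'
    # event, which is why the code above acts on 'line' (and 'exception') events.
    if event == 'line' and frame.f_code.co_name == 'target':
        if frame.f_lineno == frame.f_code.co_firstlineno + 2:
            frame.f_lineno = frame.f_code.co_firstlineno + 3   # jump past this line before it runs
    return _jump_tracer

def target():
    result = 'original'
    result = 'skipped'   # the tracer jumps over this assignment
    return result

sys.settrace(_jump_tracer)
try:
    print(target())      # prints 'original'
finally:
    sys.settrace(None)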
+ stop_reason = thread.stop_reason + + frames_list = None + + if arg is not None and event == 'exception': + # arg must be the exception info (tuple(exc_type, exc, traceback)) + exc_type, exc_desc, trace_obj = arg + if trace_obj is not None: + frames_list = pydevd_frame_utils.create_frames_list_from_traceback(trace_obj, frame, exc_type, exc_desc, exception_type=exception_type) + + if frames_list is None: + frames_list = pydevd_frame_utils.create_frames_list_from_frame(frame) + + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2: + pydev_log.debug( + 'PyDB.do_wait_suspend\nname: %s (line: %s)\n file: %s\n event: %s\n arg: %s\n step: %s (original step: %s)\n thread: %s, thread id: %s, id(thread): %s', + frame.f_code.co_name, + frame.f_lineno, + frame.f_code.co_filename, + event, + arg, + constant_to_str(thread.additional_info.pydev_step_cmd), + constant_to_str(thread.additional_info.pydev_original_step_cmd), + thread, + thread_id, + id(thread), + ) + for f in frames_list: + pydev_log.debug(' Stack: %s, %s, %s', f.f_code.co_filename, f.f_code.co_name, f.f_lineno) + + with self.suspended_frames_manager.track_frames(self) as frames_tracker: + frames_tracker.track(thread_id, frames_list) + cmd = frames_tracker.create_thread_suspend_command(thread_id, stop_reason, message, suspend_type) + self.writer.add_command(cmd) + + with CustomFramesContainer.custom_frames_lock: # @UndefinedVariable + from_this_thread = [] + + for frame_custom_thread_id, custom_frame in CustomFramesContainer.custom_frames.items(): + if custom_frame.thread_id == thread.ident: + frames_tracker.track(thread_id, pydevd_frame_utils.create_frames_list_from_frame(custom_frame.frame), frame_custom_thread_id=frame_custom_thread_id) + # print('Frame created as thread: %s' % (frame_custom_thread_id,)) + + self.writer.add_command(self.cmd_factory.make_custom_frame_created_message( + frame_custom_thread_id, custom_frame.name)) + + self.writer.add_command( + frames_tracker.create_thread_suspend_command(frame_custom_thread_id, CMD_THREAD_SUSPEND, "", suspend_type)) + + from_this_thread.append(frame_custom_thread_id) + + with self._threads_suspended_single_notification.notify_thread_suspended(thread_id, stop_reason): + keep_suspended = self._do_wait_suspend(thread, frame, event, arg, suspend_type, from_this_thread, frames_tracker) + + frames_list = None + + if keep_suspended: + # This means that we should pause again after a set next statement. 
+ self._threads_suspended_single_notification.increment_suspend_time() + self.do_wait_suspend(thread, frame, event, arg, exception_type) + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2: + pydev_log.debug('Leaving PyDB.do_wait_suspend: %s (%s) %s', thread, thread_id, id(thread)) + + def _do_wait_suspend(self, thread, frame, event, arg, suspend_type, from_this_thread, frames_tracker): + info = thread.additional_info + info.step_in_initial_location = None + keep_suspended = False + + with self._main_lock: # Use lock to check if suspended state changed + activate_gui = info.pydev_state == STATE_SUSPEND and not self.pydb_disposed + + in_main_thread = is_current_thread_main_thread() + if activate_gui and in_main_thread: + # before every stop check if matplotlib modules were imported inside script code + # or some GUI event loop needs to be activated + self._activate_gui_if_needed() + + while True: + with self._main_lock: # Use lock to check if suspended state changed + if info.pydev_state != STATE_SUSPEND or (self.pydb_disposed and not self.terminate_requested): + # Note: we can't exit here if terminate was requested while a breakpoint was hit. + break + + if in_main_thread and self.gui_in_use: + # call input hooks if only GUI is in use + self._call_input_hook() + + self.process_internal_commands() + time.sleep(0.01) + + self.cancel_async_evaluation(get_current_thread_id(thread), str(id(frame))) + + # process any stepping instructions + if info.pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE): + info.step_in_initial_location = (frame, frame.f_lineno) + if frame.f_code.co_flags & 0x80: # CO_COROUTINE = 0x80 + # When in a coroutine we switch to CMD_STEP_INTO_COROUTINE. + info.pydev_step_cmd = CMD_STEP_INTO_COROUTINE + info.pydev_step_stop = frame + self.set_trace_for_frame_and_parents(frame) + else: + info.pydev_step_stop = None + self.set_trace_for_frame_and_parents(frame) + + elif info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE, CMD_SMART_STEP_INTO): + info.pydev_step_stop = frame + self.set_trace_for_frame_and_parents(frame) + + elif info.pydev_step_cmd == CMD_RUN_TO_LINE or info.pydev_step_cmd == CMD_SET_NEXT_STATEMENT: + info.pydev_step_stop = None + self.set_trace_for_frame_and_parents(frame) + stop = False + response_msg = "" + try: + stop, _old_line, response_msg = self.set_next_statement(frame, event, info.pydev_func_name, info.pydev_next_line) + except ValueError as e: + response_msg = "%s" % e + finally: + seq = info.pydev_message + cmd = self.cmd_factory.make_set_next_stmnt_status_message(seq, stop, response_msg) + self.writer.add_command(cmd) + info.pydev_message = '' + + if stop: + # Uninstall the current frames tracker before running it. + frames_tracker.untrack_all() + cmd = self.cmd_factory.make_thread_run_message(get_current_thread_id(thread), info.pydev_step_cmd) + self.writer.add_command(cmd) + info.pydev_state = STATE_SUSPEND + thread.stop_reason = CMD_SET_NEXT_STATEMENT + keep_suspended = True + + else: + # Set next did not work... + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_state = STATE_SUSPEND + thread.stop_reason = CMD_THREAD_SUSPEND + # return to the suspend state and wait for other command (without sending any + # additional notification to the client). 
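The `frame.f_code.co_flags & 0x80` test above (used when deciding to switch a step-into to CMD_STEP_INTO_COROUTINE) is the CO_COROUTINE code flag. The same check written with the standard library's named constant, on illustrative functions:

import inspect

async def a_coroutine():
    return 1

def a_plain_function():
    return 1

print(bool(a_coroutine.__code__.co_flags & inspect.CO_COROUTINE))       # True
print(bool(a_plain_function.__code__.co_flags & inspect.CO_COROUTINE))  # False
print(inspect.CO_COROUTINE == 0x80)                                     # True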
+ return self._do_wait_suspend(thread, frame, event, arg, suspend_type, from_this_thread, frames_tracker) + + elif info.pydev_step_cmd in (CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE): + back_frame = frame.f_back + force_check_project_scope = info.pydev_step_cmd == CMD_STEP_RETURN_MY_CODE + + if force_check_project_scope or self.is_files_filter_enabled: + while back_frame is not None: + if self.apply_files_filter(back_frame, back_frame.f_code.co_filename, force_check_project_scope): + frame = back_frame + back_frame = back_frame.f_back + else: + break + + if back_frame is not None: + # steps back to the same frame (in a return call it will stop in the 'back frame' for the user) + info.pydev_step_stop = frame + self.set_trace_for_frame_and_parents(frame) + else: + # No back frame?!? -- this happens in jython when we have some frame created from an awt event + # (the previous frame would be the awt event, but this doesn't make part of 'jython', only 'java') + # so, if we're doing a step return in this situation, it's the same as just making it run + info.pydev_step_stop = None + info.pydev_original_step_cmd = -1 + info.pydev_step_cmd = -1 + info.pydev_state = STATE_RUN + + if PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING: + info.pydev_use_scoped_step_frame = False + if info.pydev_step_cmd in ( + CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE, + CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE + ): + # i.e.: We're stepping: check if the stepping should be scoped (i.e.: in ipython + # each line is executed separately in a new frame, in which case we need to consider + # the next line as if it was still in the same frame). + f = frame.f_back + if f and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[1]: + f = f.f_back + if f and f.f_code.co_name == PYDEVD_IPYTHON_CONTEXT[2]: + info.pydev_use_scoped_step_frame = True + pydev_log.info('Using (ipython) scoped stepping.') + del f + + del frame + cmd = self.cmd_factory.make_thread_run_message(get_current_thread_id(thread), info.pydev_step_cmd) + self.writer.add_command(cmd) + + with CustomFramesContainer.custom_frames_lock: + # The ones that remained on last_running must now be removed. 
+ for frame_id in from_this_thread: + # print('Removing created frame: %s' % (frame_id,)) + self.writer.add_command(self.cmd_factory.make_thread_killed_message(frame_id)) + + return keep_suspended + + def do_stop_on_unhandled_exception(self, thread, frame, frames_byid, arg): + pydev_log.debug("We are stopping in unhandled exception.") + try: + add_exception_to_frame(frame, arg) + self.send_caught_exception_stack(thread, arg, id(frame)) + try: + self.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK) + self.do_wait_suspend(thread, frame, 'exception', arg, EXCEPTION_TYPE_UNHANDLED) + except: + self.send_caught_exception_stack_proceeded(thread) + except: + pydev_log.exception("We've got an error while stopping in unhandled exception: %s.", arg[0]) + finally: + remove_exception_from_frame(frame) + frame = None + + def set_trace_for_frame_and_parents(self, frame, **kwargs): + disable = kwargs.pop('disable', False) + assert not kwargs + + while frame is not None: + # Don't change the tracing on debugger-related files + file_type = self.get_file_type(frame) + + if file_type is None: + if disable: + pydev_log.debug('Disable tracing of frame: %s - %s', frame.f_code.co_filename, frame.f_code.co_name) + if frame.f_trace is not None and frame.f_trace is not NO_FTRACE: + frame.f_trace = NO_FTRACE + + elif frame.f_trace is not self.trace_dispatch: + pydev_log.debug('Set tracing of frame: %s - %s', frame.f_code.co_filename, frame.f_code.co_name) + frame.f_trace = self.trace_dispatch + else: + pydev_log.debug('SKIP set tracing of frame: %s - %s', frame.f_code.co_filename, frame.f_code.co_name) + + frame = frame.f_back + + del frame + + def _create_pydb_command_thread(self): + curr_pydb_command_thread = self.py_db_command_thread + if curr_pydb_command_thread is not None: + curr_pydb_command_thread.do_kill_pydev_thread() + + new_pydb_command_thread = self.py_db_command_thread = PyDBCommandThread(self) + new_pydb_command_thread.start() + + def _create_check_output_thread(self): + curr_output_checker_thread = self.check_alive_thread + if curr_output_checker_thread is not None: + curr_output_checker_thread.do_kill_pydev_thread() + + check_alive_thread = self.check_alive_thread = CheckAliveThread(self) + check_alive_thread.start() + + def start_auxiliary_daemon_threads(self): + self._create_pydb_command_thread() + self._create_check_output_thread() + + def __wait_for_threads_to_finish(self, timeout): + try: + with self._wait_for_threads_to_finish_called_lock: + wait_for_threads_to_finish_called = self._wait_for_threads_to_finish_called + self._wait_for_threads_to_finish_called = True + + if wait_for_threads_to_finish_called: + # Make sure that we wait for the previous call to be finished. + self._wait_for_threads_to_finish_called_event.wait(timeout=timeout) + else: + try: + + def get_pydb_daemon_threads_to_wait(): + pydb_daemon_threads = set(self.created_pydb_daemon_threads) + pydb_daemon_threads.discard(self.check_alive_thread) + pydb_daemon_threads.discard(threading.current_thread()) + return pydb_daemon_threads + + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads waiting for pydb daemon threads to finish") + started_at = time.time() + # Note: we wait for all except the check_alive_thread (which is not really a daemon + # thread and it can call this method itself). + while time.time() < started_at + timeout: + if len(get_pydb_daemon_threads_to_wait()) == 0: + break + time.sleep(1 / 10.) 
+ else: + thread_names = [t.name for t in get_pydb_daemon_threads_to_wait()] + if thread_names: + pydev_log.debug("The following pydb threads may not have finished correctly: %s", + ', '.join(thread_names)) + finally: + self._wait_for_threads_to_finish_called_event.set() + except: + pydev_log.exception() + + def dispose_and_kill_all_pydevd_threads(self, wait=True, timeout=.5): + ''' + When this method is called we finish the debug session, terminate threads + and if this was registered as the global instance, unregister it -- afterwards + it should be possible to create a new instance and set as global to start + a new debug session. + + :param bool wait: + If True we'll wait for the threads to be actually finished before proceeding + (based on the available timeout). + Note that this must be thread-safe and if one thread is waiting the other thread should + also wait. + ''' + try: + back_frame = sys._getframe().f_back + pydev_log.debug( + 'PyDB.dispose_and_kill_all_pydevd_threads (called from: File "%s", line %s, in %s)', + back_frame.f_code.co_filename, back_frame.f_lineno, back_frame.f_code.co_name + ) + back_frame = None + with self._disposed_lock: + disposed = self.pydb_disposed + self.pydb_disposed = True + + if disposed: + if wait: + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads (already disposed - wait)") + self.__wait_for_threads_to_finish(timeout) + else: + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads (already disposed - no wait)") + return + + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads (first call)") + + # Wait until a time when there are no commands being processed to kill the threads. + started_at = time.time() + while time.time() < started_at + timeout: + with self._main_lock: + writer = self.writer + if writer is None or writer.empty(): + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads no commands being processed.") + break + else: + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads timed out waiting for writer to be empty.") + + pydb_daemon_threads = set(self.created_pydb_daemon_threads) + for t in pydb_daemon_threads: + if hasattr(t, 'do_kill_pydev_thread'): + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads killing thread: %s", t) + t.do_kill_pydev_thread() + + if wait: + self.__wait_for_threads_to_finish(timeout) + else: + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads: no wait") + + py_db = get_global_debugger() + if py_db is self: + set_global_debugger(None) + except: + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads: exception") + try: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3: + pydev_log.exception() + except: + pass + finally: + pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads: finished") + + def prepare_to_run(self): + ''' Shared code to prepare debugging by installing traces and registering threads ''' + self.patch_threads() + self.start_auxiliary_daemon_threads() + + def patch_threads(self): + try: + # not available in jython! 
+ threading.settrace(self.trace_dispatch) # for all future threads + except: + pass + + from _pydev_bundle.pydev_monkey import patch_thread_modules + patch_thread_modules() + + def run(self, file, globals=None, locals=None, is_module=False, set_trace=True): + module_name = None + entry_point_fn = '' + if is_module: + # When launching with `python -m `, python automatically adds + # an empty path to the PYTHONPATH which resolves files in the current + # directory, so, depending how pydevd itself is launched, we may need + # to manually add such an entry to properly resolve modules in the + # current directory (see: https://github.com/Microsoft/ptvsd/issues/1010). + if '' not in sys.path: + sys.path.insert(0, '') + file, _, entry_point_fn = file.partition(':') + module_name = file + filename = get_fullname(file) + if filename is None: + mod_dir = get_package_dir(module_name) + if mod_dir is None: + sys.stderr.write("No module named %s\n" % file) + return + else: + filename = get_fullname("%s.__main__" % module_name) + if filename is None: + sys.stderr.write("No module named %s\n" % file) + return + else: + file = filename + else: + file = filename + mod_dir = os.path.dirname(filename) + main_py = os.path.join(mod_dir, '__main__.py') + main_pyc = os.path.join(mod_dir, '__main__.pyc') + if filename.endswith('__init__.pyc'): + if os.path.exists(main_pyc): + filename = main_pyc + elif os.path.exists(main_py): + filename = main_py + elif filename.endswith('__init__.py'): + if os.path.exists(main_pyc) and not os.path.exists(main_py): + filename = main_pyc + elif os.path.exists(main_py): + filename = main_py + + sys.argv[0] = filename + + if os.path.isdir(file): + new_target = os.path.join(file, '__main__.py') + if os.path.isfile(new_target): + file = new_target + + m = None + if globals is None: + m = save_main_module(file, 'pydevd') + globals = m.__dict__ + try: + globals['__builtins__'] = __builtins__ + except NameError: + pass # Not there on Jython... + + if locals is None: + locals = globals + + # Predefined (writable) attributes: __name__ is the module's name; + # __doc__ is the module's documentation string, or None if unavailable; + # __file__ is the pathname of the file from which the module was loaded, + # if it was loaded from a file. The __file__ attribute is not present for + # C modules that are statically linked into the interpreter; for extension modules + # loaded dynamically from a shared library, it is the pathname of the shared library file. + + # I think this is an ugly hack, bug it works (seems to) for the bug that says that sys.path should be the same in + # debug and run. 
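The run()/_exec() pair above mirrors the standard library's runpy behaviours for the two launch modes (a plain script path versus `python -m package.module`), with pydevd shipping its own pydevd_runpy copy. A standalone sketch of the same two modes using stock runpy; the throwaway demo script written below is purely illustrative:

import os
import runpy
import sys
import tempfile

def launch(target, is_module):
    if is_module:
        # `python -m target` equivalent; '' makes modules in the CWD resolvable,
        # the same tweak applied above before resolving the module's filename.
        if '' not in sys.path:
            sys.path.insert(0, '')
        return runpy.run_module(target, run_name='__main__', alter_sys=True)
    # Plain `python target.py` equivalent.
    return runpy.run_path(target, run_name='__main__')

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as script:
    script.write("value = 6 * 7\nprint('demo script ran; value =', value)\n")

result_globals = launch(script.name, is_module=False)
print(result_globals['value'])   # 42 - run_path returns the script's resulting globals
os.remove(script.name)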
+ if sys.path[0] != '' and m is not None and m.__file__.startswith(sys.path[0]): + # print >> sys.stderr, 'Deleting: ', sys.path[0] + del sys.path[0] + + if not is_module: + # now, the local directory has to be added to the pythonpath + # sys.path.insert(0, os.getcwd()) + # Changed: it's not the local directory, but the directory of the file launched + # The file being run must be in the pythonpath (even if it was not before) + sys.path.insert(0, os.path.split(os_path_abspath(file))[0]) + + if set_trace: + self.wait_for_ready_to_run() + + # call prepare_to_run when we already have all information about breakpoints + self.prepare_to_run() + + t = threadingCurrentThread() + thread_id = get_current_thread_id(t) + + if self.thread_analyser is not None: + wrap_threads() + self.thread_analyser.set_start_time(cur_time()) + send_concurrency_message("threading_event", 0, t.name, thread_id, "thread", "start", file, 1, None, parent=thread_id) + + if self.asyncio_analyser is not None: + # we don't have main thread in asyncio graph, so we should add a fake event + send_concurrency_message("asyncio_event", 0, "Task", "Task", "thread", "stop", file, 1, frame=None, parent=None) + + try: + if INTERACTIVE_MODE_AVAILABLE: + self.init_gui_support() + except: + pydev_log.exception("Matplotlib support in debugger failed") + + if hasattr(sys, 'exc_clear'): + # we should clean exception information in Python 2, before user's code execution + sys.exc_clear() + + # Notify that the main thread is created. + self.notify_thread_created(thread_id, t) + + # Note: important: set the tracing right before calling _exec. + if set_trace: + self.enable_tracing() + + return self._exec(is_module, entry_point_fn, module_name, file, globals, locals) + + def _exec(self, is_module, entry_point_fn, module_name, file, globals, locals): + ''' + This function should have frames tracked by unhandled exceptions (the `_exec` name is important). + ''' + if not is_module: + globals = pydevd_runpy.run_path(file, globals, '__main__') + else: + # treat ':' as a separator between module and entry point function + # if there is no entry point we run we same as with -m switch. Otherwise we perform + # an import and execute the entry point + if entry_point_fn: + mod = __import__(module_name, level=0, fromlist=[entry_point_fn], globals=globals, locals=locals) + func = getattr(mod, entry_point_fn) + func() + else: + # Run with the -m switch + globals = pydevd_runpy._run_module_as_main(module_name, alter_argv=False) + return globals + + def wait_for_commands(self, globals): + self._activate_gui_if_needed() + + thread = threading.current_thread() + from _pydevd_bundle import pydevd_frame_utils + frame = pydevd_frame_utils.Frame(None, -1, pydevd_frame_utils.FCode("Console", + os.path.abspath(os.path.dirname(__file__))), globals, globals) + thread_id = get_current_thread_id(thread) + self.add_fake_frame(thread_id, id(frame), frame) + + cmd = self.cmd_factory.make_show_console_message(self, thread_id, frame) + if self.writer is not None: + self.writer.add_command(cmd) + + while True: + if self.gui_in_use: + # call input hooks if only GUI is in use + self._call_input_hook() + self.process_internal_commands() + time.sleep(0.01) + + +class IDAPMessagesListener(object): + + def before_send(self, message_as_dict): + ''' + Called just before a message is sent to the IDE. + + :type message_as_dict: dict + ''' + + def after_receive(self, message_as_dict): + ''' + Called just after a message is received from the IDE. 
+ + :type message_as_dict: dict + ''' + + +def add_dap_messages_listener(dap_messages_listener): + ''' + Adds a listener for the DAP (debug adapter protocol) messages. + + :type dap_messages_listener: IDAPMessagesListener + + :note: messages from the xml backend are not notified through this API. + + :note: the notifications are sent from threads and they are not synchronized (so, + it's possible that a message is sent and received from different threads at the same time). + ''' + py_db = get_global_debugger() + if py_db is None: + raise AssertionError('PyDB is still not setup.') + + py_db.add_dap_messages_listener(dap_messages_listener) + + +def send_json_message(msg): + ''' + API to send some custom json message. + + :param dict|pydevd_schema.BaseSchema msg: + The custom message to be sent. + + :return bool: + True if the message was added to the queue to be sent and False otherwise. + ''' + py_db = get_global_debugger() + if py_db is None: + return False + + writer = py_db.writer + if writer is None: + return False + + cmd = NetCommand(-1, 0, msg, is_json=True) + writer.add_command(cmd) + return True + + +def set_debug(setup): + setup['DEBUG_RECORD_SOCKET_READS'] = True + setup['DEBUG_TRACE_BREAKPOINTS'] = 1 + setup['DEBUG_TRACE_LEVEL'] = 3 + + +def enable_qt_support(qt_support_mode): + from _pydev_bundle import pydev_monkey_qt + pydev_monkey_qt.patch_qt(qt_support_mode) + + +def start_dump_threads_thread(filename_template, timeout, recurrent): + ''' + Helper to dump threads after a timeout. + + :param filename_template: + A template filename, such as 'c:/temp/thread_dump_%s.txt', where the %s will + be replaced by the time for the dump. + :param timeout: + The timeout (in seconds) for the dump. + :param recurrent: + If True we'll keep on doing thread dumps. + ''' + assert filename_template.count('%s') == 1, \ + 'Expected one %%s to appear in: %s' % (filename_template,) + + def _threads_on_timeout(): + try: + while True: + time.sleep(timeout) + filename = filename_template % (time.time(),) + try: + os.makedirs(os.path.dirname(filename)) + except Exception: + pass + with open(filename, 'w') as stream: + dump_threads(stream) + if not recurrent: + return + except Exception: + pydev_log.exception() + + t = threading.Thread(target=_threads_on_timeout) + mark_as_pydevd_daemon_thread(t) + t.start() + + +def dump_threads(stream=None): + ''' + Helper to dump thread info (default is printing to stderr). + ''' + pydevd_utils.dump_threads(stream) + + +def usage(doExit=0): + sys.stdout.write('Usage:\n') + sys.stdout.write('pydevd.py --port N [(--client hostname) | --server] --file executable [file_options]\n') + if doExit: + sys.exit(0) + + +def _init_stdout_redirect(): + pydevd_io.redirect_stream_to_pydb_io_messages(std='stdout') + + +def _init_stderr_redirect(): + pydevd_io.redirect_stream_to_pydb_io_messages(std='stderr') + + +def _enable_attach( + address, + dont_trace_start_patterns=(), + dont_trace_end_patterns=(), + patch_multiprocessing=False, + access_token=None, + client_access_token=None, + ): + ''' + Starts accepting connections at the given host/port. The debugger will not be initialized nor + configured, it'll only start accepting connections (and will have the tracing setup in this + thread). + + Meant to be used with the DAP (Debug Adapter Protocol) with _wait_for_attach(). 
+ + :param address: (host, port) + :type address: tuple(str, int) + ''' + host = address[0] + port = int(address[1]) + + if SetupHolder.setup is not None: + if port != SetupHolder.setup['port']: + raise AssertionError('Unable to listen in port: %s (already listening in port: %s)' % (port, SetupHolder.setup['port'])) + settrace( + host=host, + port=port, + suspend=False, + wait_for_ready_to_run=False, + block_until_connected=False, + dont_trace_start_patterns=dont_trace_start_patterns, + dont_trace_end_patterns=dont_trace_end_patterns, + patch_multiprocessing=patch_multiprocessing, + access_token=access_token, + client_access_token=client_access_token, + ) + + py_db = get_global_debugger() + py_db.wait_for_server_socket_ready() + return py_db._server_socket_name + + +def _wait_for_attach(cancel=None): + ''' + Meant to be called after _enable_attach() -- the current thread will only unblock after a + connection is in place and the DAP (Debug Adapter Protocol) sends the ConfigurationDone + request. + ''' + py_db = get_global_debugger() + if py_db is None: + raise AssertionError('Debugger still not created. Please use _enable_attach() before using _wait_for_attach().') + + py_db.block_until_configuration_done(cancel=cancel) + + +def _is_attached(): + ''' + Can be called any time to check if the connection was established and the DAP (Debug Adapter Protocol) has sent + the ConfigurationDone request. + ''' + py_db = get_global_debugger() + return (py_db is not None) and py_db.is_attached() + + +#======================================================================================================================= +# settrace +#======================================================================================================================= +def settrace( + host=None, + stdout_to_server=False, + stderr_to_server=False, + port=5678, + suspend=True, + trace_only_current_thread=False, + overwrite_prev_trace=False, + patch_multiprocessing=False, + stop_at_frame=None, + block_until_connected=True, + wait_for_ready_to_run=True, + dont_trace_start_patterns=(), + dont_trace_end_patterns=(), + access_token=None, + client_access_token=None, + notify_stdin=True, + **kwargs + ): + '''Sets the tracing function with the pydev debug function and initializes needed facilities. + + :param host: the user may specify another host, if the debug server is not in the same machine (default is the local + host) + + :param stdout_to_server: when this is true, the stdout is passed to the debug server + + :param stderr_to_server: when this is true, the stderr is passed to the debug server + so that they are printed in its console and not in this process console. + + :param port: specifies which port to use for communicating with the server (note that the server must be started + in the same port). @note: currently it's hard-coded at 5678 in the client + + :param suspend: whether a breakpoint should be emulated as soon as this function is called. + + :param trace_only_current_thread: determines if only the current thread will be traced or all current and future + threads will also have the tracing enabled. + + :param overwrite_prev_trace: deprecated + + :param patch_multiprocessing: if True we'll patch the functions which create new processes so that launched + processes are debugged. + + :param stop_at_frame: if passed it'll stop at the given frame, otherwise it'll stop in the function which + called this method. 
+
+    :param wait_for_ready_to_run: if True settrace will block until the ready_to_run flag is set to True,
+        otherwise, it'll set ready_to_run to True and this function won't block.
+
+        Note that if wait_for_ready_to_run == False, there are no guarantees that the debugger is synchronized
+        with what's configured in the client (IDE), the only guarantee is that when leaving this function
+        the debugger will be already connected.
+
+    :param dont_trace_start_patterns: if set, then any path that starts with one of the patterns in the collection
+        will not be traced
+
+    :param dont_trace_end_patterns: if set, then any path that ends with one of the patterns in the collection
+        will not be traced
+
+    :param access_token: token to be sent from the client (i.e.: IDE) to the debugger when a connection
+        is established (verified by the debugger).
+
+    :param client_access_token: token to be sent from the debugger to the client (i.e.: IDE) when
+        a connection is established (verified by the client).
+
+    :param notify_stdin:
+        If True sys.stdin will be patched to notify the client when a message is requested
+        from the IDE. This is done so that when reading the stdin the client is notified.
+        Clients may need this to know when something that is being written should be interpreted
+        as an input to the process or as a command to be evaluated.
+        Note that parallel-python has issues with this (because it tries to assert that sys.stdin
+        is of a given type instead of just checking that it has what it needs).
+    '''
+
+    stdout_to_server = stdout_to_server or kwargs.get('stdoutToServer', False)  # Backward compatibility
+    stderr_to_server = stderr_to_server or kwargs.get('stderrToServer', False)  # Backward compatibility
+
+    # Internal use (may be used to set the setup info directly for subprocesses).
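A typical programmatic use of the settrace() API documented above. The host and port are illustrative and assume a debug server (for example the IDE) is already listening at that address:

import pydevd

pydevd.settrace(
    host='localhost',
    port=5678,
    stdout_to_server=True,     # mirror this process's stdout in the debug server console
    stderr_to_server=True,
    suspend=True,              # pause here as soon as the connection is made
    patch_multiprocessing=True,
)

print('Runs under the debugger once the client resumes execution.')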
+ __setup_holder__ = kwargs.get('__setup_holder__') + + with _set_trace_lock: + _locked_settrace( + host, + stdout_to_server, + stderr_to_server, + port, + suspend, + trace_only_current_thread, + patch_multiprocessing, + stop_at_frame, + block_until_connected, + wait_for_ready_to_run, + dont_trace_start_patterns, + dont_trace_end_patterns, + access_token, + client_access_token, + __setup_holder__=__setup_holder__, + notify_stdin=notify_stdin, + ) + + +_set_trace_lock = ForkSafeLock() + + +def _locked_settrace( + host, + stdout_to_server, + stderr_to_server, + port, + suspend, + trace_only_current_thread, + patch_multiprocessing, + stop_at_frame, + block_until_connected, + wait_for_ready_to_run, + dont_trace_start_patterns, + dont_trace_end_patterns, + access_token, + client_access_token, + __setup_holder__, + notify_stdin, + ): + if patch_multiprocessing: + try: + from _pydev_bundle import pydev_monkey + except: + pass + else: + pydev_monkey.patch_new_process_functions() + + if host is None: + from _pydev_bundle import pydev_localhost + host = pydev_localhost.get_localhost() + + global _global_redirect_stdout_to_server + global _global_redirect_stderr_to_server + + py_db = get_global_debugger() + if __setup_holder__: + SetupHolder.setup = __setup_holder__ + if py_db is None: + py_db = PyDB() + pydevd_vm_type.setup_type() + + if SetupHolder.setup is None: + setup = { + 'client': host, # dispatch expects client to be set to the host address when server is False + 'server': False, + 'port': int(port), + 'multiprocess': patch_multiprocessing, + 'skip-notify-stdin': not notify_stdin, + } + SetupHolder.setup = setup + + if access_token is not None: + py_db.authentication.access_token = access_token + SetupHolder.setup['access-token'] = access_token + if client_access_token is not None: + py_db.authentication.client_access_token = client_access_token + SetupHolder.setup['client-access-token'] = client_access_token + + if block_until_connected: + py_db.connect(host, port) # Note: connect can raise error. + else: + # Create a dummy writer and wait for the real connection. + py_db.writer = WriterThread(NULL, py_db, terminate_on_socket_close=False) + py_db.create_wait_for_connection_thread() + + if dont_trace_start_patterns or dont_trace_end_patterns: + PyDevdAPI().set_dont_trace_start_end_patterns(py_db, dont_trace_start_patterns, dont_trace_end_patterns) + + _global_redirect_stdout_to_server = stdout_to_server + _global_redirect_stderr_to_server = stderr_to_server + + if _global_redirect_stdout_to_server: + _init_stdout_redirect() + + if _global_redirect_stderr_to_server: + _init_stderr_redirect() + + if notify_stdin: + patch_stdin() + + t = threadingCurrentThread() + additional_info = set_additional_thread_info(t) + + if not wait_for_ready_to_run: + py_db.ready_to_run = True + + py_db.wait_for_ready_to_run() + py_db.start_auxiliary_daemon_threads() + + try: + if INTERACTIVE_MODE_AVAILABLE: + py_db.init_gui_support() + except: + pydev_log.exception("Matplotlib support in debugger failed") + + if trace_only_current_thread: + py_db.enable_tracing() + else: + # Trace future threads. 
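The "trace future threads" step above relies on threading.settrace(), which makes the given function the initial trace function of every thread started afterwards. A standalone sketch of that behaviour (the worker and the call-logging tracer are illustrative):

import threading

def trace_calls(frame, event, arg):
    if event == 'call':
        print('call to %s in %s' % (frame.f_code.co_name, threading.current_thread().name))
    return None                        # no per-line tracing needed for this sketch

threading.settrace(trace_calls)        # applies to threads started from now on

def worker():
    return sum(range(5))

t = threading.Thread(target=worker, name='traced-worker')
t.start()
t.join()

threading.settrace(None)               # stop installing the trace on new threads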
+ py_db.patch_threads() + + py_db.enable_tracing(py_db.trace_dispatch, apply_to_all_threads=True) + + # As this is the first connection, also set tracing for any untraced threads + py_db.set_tracing_for_untraced_contexts() + + py_db.set_trace_for_frame_and_parents(get_frame().f_back) + + with CustomFramesContainer.custom_frames_lock: # @UndefinedVariable + for _frameId, custom_frame in CustomFramesContainer.custom_frames.items(): + py_db.set_trace_for_frame_and_parents(custom_frame.frame) + + else: + # ok, we're already in debug mode, with all set, so, let's just set the break + if access_token is not None: + py_db.authentication.access_token = access_token + if client_access_token is not None: + py_db.authentication.client_access_token = client_access_token + + py_db.set_trace_for_frame_and_parents(get_frame().f_back) + + t = threadingCurrentThread() + additional_info = set_additional_thread_info(t) + + if trace_only_current_thread: + py_db.enable_tracing() + else: + # Trace future threads. + py_db.patch_threads() + py_db.enable_tracing(py_db.trace_dispatch, apply_to_all_threads=True) + + # Suspend as the last thing after all tracing is in place. + if suspend: + if stop_at_frame is not None: + # If the step was set we have to go to run state and + # set the proper frame for it to stop. + additional_info.pydev_state = STATE_RUN + additional_info.pydev_original_step_cmd = CMD_STEP_OVER + additional_info.pydev_step_cmd = CMD_STEP_OVER + additional_info.pydev_step_stop = stop_at_frame + additional_info.suspend_type = PYTHON_SUSPEND + else: + # Ask to break as soon as possible. + py_db.set_suspend(t, CMD_SET_BREAK) + + +def stoptrace(): + pydev_log.debug("pydevd.stoptrace()") + pydevd_tracing.restore_sys_set_trace_func() + sys.settrace(None) + try: + # not available in jython! + threading.settrace(None) # for all future threads + except: + pass + + from _pydev_bundle.pydev_monkey import undo_patch_thread_modules + undo_patch_thread_modules() + + # Either or both standard streams can be closed at this point, + # in which case flush() will fail. 
+ try: + sys.stdout.flush() + except: + pass + try: + sys.stderr.flush() + except: + pass + + py_db = get_global_debugger() + + if py_db is not None: + py_db.dispose_and_kill_all_pydevd_threads() + + +class Dispatcher(object): + + def __init__(self): + self.port = None + + def connect(self, host, port): + self.host = host + self.port = port + self.client = start_client(self.host, self.port) + self.reader = DispatchReader(self) + self.reader.pydev_do_not_trace = False # we run reader in the same thread so we don't want to loose tracing + self.reader.run() + + def close(self): + try: + self.reader.do_kill_pydev_thread() + except: + pass + + +class DispatchReader(ReaderThread): + + def __init__(self, dispatcher): + self.dispatcher = dispatcher + + ReaderThread.__init__( + self, + get_global_debugger(), + self.dispatcher.client, + PyDevJsonCommandProcessor=PyDevJsonCommandProcessor, + process_net_command=process_net_command, + ) + + @overrides(ReaderThread._on_run) + def _on_run(self): + dummy_thread = threading.current_thread() + dummy_thread.is_pydev_daemon_thread = False + return ReaderThread._on_run(self) + + @overrides(PyDBDaemonThread.do_kill_pydev_thread) + def do_kill_pydev_thread(self): + if not self._kill_received: + ReaderThread.do_kill_pydev_thread(self) + try: + self.sock.shutdown(SHUT_RDWR) + except: + pass + try: + self.sock.close() + except: + pass + + def process_command(self, cmd_id, seq, text): + if cmd_id == 99: + self.dispatcher.port = int(text) + self._kill_received = True + + +DISPATCH_APPROACH_NEW_CONNECTION = 1 # Used by PyDev +DISPATCH_APPROACH_EXISTING_CONNECTION = 2 # Used by PyCharm +DISPATCH_APPROACH = DISPATCH_APPROACH_NEW_CONNECTION + + +def dispatch(): + setup = SetupHolder.setup + host = setup['client'] + port = setup['port'] + if DISPATCH_APPROACH == DISPATCH_APPROACH_EXISTING_CONNECTION: + dispatcher = Dispatcher() + try: + dispatcher.connect(host, port) + port = dispatcher.port + finally: + dispatcher.close() + return host, port + + +def settrace_forked(setup_tracing=True): + ''' + When creating a fork from a process in the debugger, we need to reset the whole debugger environment! + ''' + from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder + py_db = GlobalDebuggerHolder.global_dbg + if py_db is not None: + py_db.created_pydb_daemon_threads = {} # Just making sure we won't touch those (paused) threads. + py_db = None + + GlobalDebuggerHolder.global_dbg = None + threading.current_thread().additional_info = None + + # Make sure that we keep the same access tokens for subprocesses started through fork. + setup = SetupHolder.setup + if setup is None: + setup = {} + else: + # i.e.: Get the ppid at this point as it just changed. + # If we later do an exec() it should remain the same ppid. 
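settrace_forked() above resets per-process debugger state (daemon threads, the global PyDB, thread-local info) before reconnecting from the child. A standalone sketch of that reset-after-fork idea using the standard library's os.register_at_fork; the state dictionary is illustrative and the demo only runs where fork() exists:

import os

_per_process_state = {'connection': 'parent-connection', 'pid': os.getpid()}

def _reset_after_fork_in_child():
    # The child must not reuse the parent's connection or helper threads.
    _per_process_state['connection'] = None
    _per_process_state['pid'] = os.getpid()

if hasattr(os, 'register_at_fork'):                # POSIX-only, like fork() itself
    os.register_at_fork(after_in_child=_reset_after_fork_in_child)
    child_pid = os.fork()
    if child_pid == 0:
        print('child :', _per_process_state, flush=True)   # connection cleared, new pid
        os._exit(0)
    os.waitpid(child_pid, 0)
    print('parent:', _per_process_state)                   # untouched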
+ setup[pydevd_constants.ARGUMENT_PPID] = PyDevdAPI().get_ppid() + access_token = setup.get('access-token') + client_access_token = setup.get('client-access-token') + + if setup_tracing: + from _pydevd_frame_eval.pydevd_frame_eval_main import clear_thread_local_info + host, port = dispatch() + + import pydevd_tracing + pydevd_tracing.restore_sys_set_trace_func() + + if setup_tracing: + if port is not None: + custom_frames_container_init() + + if clear_thread_local_info is not None: + clear_thread_local_info() + + settrace( + host, + port=port, + suspend=False, + trace_only_current_thread=False, + overwrite_prev_trace=True, + patch_multiprocessing=True, + access_token=access_token, + client_access_token=client_access_token, + ) + + +@contextmanager +def skip_subprocess_arg_patch(): + ''' + May be used to skip the monkey-patching that pydevd does to + skip changing arguments to embed the debugger into child processes. + + i.e.: + + with pydevd.skip_subprocess_arg_patch(): + subprocess.call(...) + ''' + from _pydev_bundle import pydev_monkey + with pydev_monkey.skip_subprocess_arg_patch(): + yield + + +def add_dont_terminate_child_pid(pid): + ''' + May be used to ask pydevd to skip the termination of some process + when it's asked to terminate (debug adapter protocol only). + + :param int pid: + The pid to be ignored. + + i.e.: + + process = subprocess.Popen(...) + pydevd.add_dont_terminate_child_pid(process.pid) + ''' + py_db = get_global_debugger() + if py_db is not None: + py_db.dont_terminate_child_pids.add(pid) + + +class SetupHolder: + + setup = None + + +def apply_debugger_options(setup_options): + """ + + :type setup_options: dict[str, bool] + """ + default_options = {'save-signatures': False, 'qt-support': ''} + default_options.update(setup_options) + setup_options = default_options + + debugger = get_global_debugger() + if setup_options['save-signatures']: + if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON: + sys.stderr.write("Collecting run-time type information is not supported for Jython\n") + else: + # Only import it if we're going to use it! + from _pydevd_bundle.pydevd_signature import SignatureFactory + debugger.signature_factory = SignatureFactory() + + if setup_options['qt-support']: + enable_qt_support(setup_options['qt-support']) + + +@call_only_once +def patch_stdin(): + _internal_patch_stdin(None, sys, getpass_mod) + + +def _internal_patch_stdin(py_db=None, sys=None, getpass_mod=None): + ''' + Note: don't use this function directly, use `patch_stdin()` instead. + (this function is only meant to be used on test-cases to avoid patching the actual globals). + ''' + # Patch stdin so that we notify when readline() is called. 
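A minimal standalone sketch of the stdin wrapping described in the comment above: the stream is wrapped so a callback fires whenever a readline() is about to happen. The callback and the StringIO-backed demo stream are illustrative; pydevd's actual wrapper is the DebugConsoleStdIn installed by _internal_patch_stdin().

import io

class NotifyingStdin:
    def __init__(self, wrapped, on_input_requested):
        self._wrapped = wrapped
        self._on_input_requested = on_input_requested

    def readline(self, *args, **kwargs):
        self._on_input_requested()                # e.g. tell the client input is expected
        return self._wrapped.readline(*args, **kwargs)

    def __getattr__(self, name):
        return getattr(self._wrapped, name)       # everything else is passed through

demo = NotifyingStdin(io.StringIO('hello\n'), lambda: print('<input requested>'))
print(demo.readline().strip())                    # prints '<input requested>' then 'hello'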
+ original_sys_stdin = sys.stdin + debug_console_stdin = DebugConsoleStdIn(py_db, original_sys_stdin) + sys.stdin = debug_console_stdin + + _original_getpass = getpass_mod.getpass + + @functools.wraps(_original_getpass) + def getpass(*args, **kwargs): + with DebugConsoleStdIn.notify_input_requested(debug_console_stdin): + try: + curr_stdin = sys.stdin + if curr_stdin is debug_console_stdin: + sys.stdin = original_sys_stdin + return _original_getpass(*args, **kwargs) + finally: + sys.stdin = curr_stdin + + getpass_mod.getpass = getpass + +# Dispatch on_debugger_modules_loaded here, after all primary py_db modules are loaded + + +for handler in pydevd_extension_utils.extensions_of_type(DebuggerEventHandler): + handler.on_debugger_modules_loaded(debugger_version=__version__) + + +#======================================================================================================================= +# main +#======================================================================================================================= +def main(): + + # parse the command line. --file is our last argument that is required + pydev_log.debug("Initial arguments: %s", (sys.argv,)) + pydev_log.debug("Current pid: %s", os.getpid()) + try: + from _pydevd_bundle.pydevd_command_line_handling import process_command_line + setup = process_command_line(sys.argv) + SetupHolder.setup = setup + except ValueError: + pydev_log.exception() + usage(1) + + if setup['print-in-debugger-startup']: + try: + pid = ' (pid: %s)' % os.getpid() + except: + pid = '' + sys.stderr.write("pydev debugger: starting%s\n" % pid) + + pydev_log.debug("Executing file %s", setup['file']) + pydev_log.debug("arguments: %s", (sys.argv,)) + + pydevd_vm_type.setup_type(setup.get('vm_type', None)) + + if SHOW_DEBUG_INFO_ENV: + set_debug(setup) + + DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = setup.get('DEBUG_RECORD_SOCKET_READS', DebugInfoHolder.DEBUG_RECORD_SOCKET_READS) + DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = setup.get('DEBUG_TRACE_BREAKPOINTS', DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS) + DebugInfoHolder.DEBUG_TRACE_LEVEL = setup.get('DEBUG_TRACE_LEVEL', DebugInfoHolder.DEBUG_TRACE_LEVEL) + + port = setup['port'] + host = setup['client'] + f = setup['file'] + fix_app_engine_debug = False + + debugger = get_global_debugger() + if debugger is None: + debugger = PyDB() + + try: + from _pydev_bundle import pydev_monkey + except: + pass # Not usable on jython 2.1 + else: + if setup['multiprocess']: # PyDev + pydev_monkey.patch_new_process_functions() + + elif setup['multiproc']: # PyCharm + pydev_log.debug("Started in multiproc mode\n") + global DISPATCH_APPROACH + DISPATCH_APPROACH = DISPATCH_APPROACH_EXISTING_CONNECTION + + dispatcher = Dispatcher() + try: + dispatcher.connect(host, port) + if dispatcher.port is not None: + port = dispatcher.port + pydev_log.debug("Received port %d\n", port) + pydev_log.info("pydev debugger: process %d is connecting\n" % os.getpid()) + + try: + pydev_monkey.patch_new_process_functions() + except: + pydev_log.exception("Error patching process functions.") + else: + pydev_log.critical("pydev debugger: couldn't get port for new debug process.") + finally: + dispatcher.close() + else: + try: + pydev_monkey.patch_new_process_functions_with_warning() + except: + pydev_log.exception("Error patching process functions.") + + # Only do this patching if we're not running with multiprocess turned on. 
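The entry point handled by main() above is normally driven from the command line described by usage() earlier. Illustrative invocations, using only the flags shown in that usage string (host, port and script name are examples):

    python pydevd.py --port 5678 --client 127.0.0.1 --file my_script.py arg1 arg2
    python pydevd.py --port 5678 --server --file my_script.py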
+ if f.find('dev_appserver.py') != -1: + if os.path.basename(f).startswith('dev_appserver.py'): + appserver_dir = os.path.dirname(f) + version_file = os.path.join(appserver_dir, 'VERSION') + if os.path.exists(version_file): + try: + stream = open(version_file, 'r') + try: + for line in stream.read().splitlines(): + line = line.strip() + if line.startswith('release:'): + line = line[8:].strip() + version = line.replace('"', '') + version = version.split('.') + if int(version[0]) > 1: + fix_app_engine_debug = True + + elif int(version[0]) == 1: + if int(version[1]) >= 7: + # Only fix from 1.7 onwards + fix_app_engine_debug = True + break + finally: + stream.close() + except: + pydev_log.exception() + + try: + # In the default run (i.e.: run directly on debug mode), we try to patch stackless as soon as possible + # on a run where we have a remote debug, we may have to be more careful because patching stackless means + # that if the user already had a stackless.set_schedule_callback installed, he'd loose it and would need + # to call it again (because stackless provides no way of getting the last function which was registered + # in set_schedule_callback). + # + # So, ideally, if there's an application using stackless and the application wants to use the remote debugger + # and benefit from stackless debugging, the application itself must call: + # + # import pydevd_stackless + # pydevd_stackless.patch_stackless() + # + # itself to be able to benefit from seeing the tasklets created before the remote debugger is attached. + from _pydevd_bundle import pydevd_stackless + pydevd_stackless.patch_stackless() + except: + # It's ok not having stackless there... + try: + if hasattr(sys, 'exc_clear'): + sys.exc_clear() # the exception information should be cleaned in Python 2 + except: + pass + + is_module = setup['module'] + if not setup['skip-notify-stdin']: + patch_stdin() + + if setup[pydevd_constants.ARGUMENT_JSON_PROTOCOL]: + PyDevdAPI().set_protocol(debugger, 0, JSON_PROTOCOL) + + elif setup[pydevd_constants.ARGUMENT_HTTP_JSON_PROTOCOL]: + PyDevdAPI().set_protocol(debugger, 0, HTTP_JSON_PROTOCOL) + + elif setup[pydevd_constants.ARGUMENT_HTTP_PROTOCOL]: + PyDevdAPI().set_protocol(debugger, 0, pydevd_constants.HTTP_PROTOCOL) + + elif setup[pydevd_constants.ARGUMENT_QUOTED_LINE_PROTOCOL]: + PyDevdAPI().set_protocol(debugger, 0, pydevd_constants.QUOTED_LINE_PROTOCOL) + + access_token = setup['access-token'] + if access_token: + debugger.authentication.access_token = access_token + + client_access_token = setup['client-access-token'] + if client_access_token: + debugger.authentication.client_access_token = client_access_token + + if fix_app_engine_debug: + sys.stderr.write("pydev debugger: google app engine integration enabled\n") + curr_dir = os.path.dirname(__file__) + app_engine_startup_file = os.path.join(curr_dir, 'pydev_app_engine_debug_startup.py') + + sys.argv.insert(1, '--python_startup_script=' + app_engine_startup_file) + import json + setup['pydevd'] = __file__ + sys.argv.insert(2, '--python_startup_args=%s' % json.dumps(setup),) + sys.argv.insert(3, '--automatic_restart=no') + sys.argv.insert(4, '--max_module_instances=1') + + # Run the dev_appserver + debugger.run(setup['file'], None, None, is_module, set_trace=False) + else: + if setup['save-threading']: + debugger.thread_analyser = ThreadingLogger() + if setup['save-asyncio']: + debugger.asyncio_analyser = AsyncioLogger() + + apply_debugger_options(setup) + + try: + debugger.connect(host, port) + except: + sys.stderr.write("Could not 
connect to %s: %s\n" % (host, port)) + pydev_log.exception() + sys.exit(1) + + globals = debugger.run(setup['file'], None, None, is_module) + + if setup['cmd-line']: + debugger.wait_for_commands(globals) + + +if __name__ == '__main__': + main() diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/README.txt b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/README.txt new file mode 120000 index 0000000..9ac001e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/README.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/26/26/cb/a1d538973c2f1b223c4cd6df621bbadba30fa58377cf8600c3cdb34cf5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_always_live_program.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_always_live_program.cpython-38.pyc new file mode 100644 index 0000000..3d5eae8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_always_live_program.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_check.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_check.cpython-38.pyc new file mode 100644 index 0000000..e4ee5af Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_check.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process.cpython-38.pyc new file mode 100644 index 0000000..54af595 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process_linux.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process_linux.cpython-38.pyc new file mode 100644 index 0000000..151deae Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/_test_attach_to_process_linux.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/add_code_to_python_process.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/add_code_to_python_process.cpython-38.pyc new file mode 100644 index 0000000..19cb36e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/add_code_to_python_process.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_pydevd.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_pydevd.cpython-38.pyc new file mode 100644 index 0000000..03b2bcb Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_pydevd.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_script.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_script.cpython-38.pyc new file mode 100644 index 0000000..9e52982 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/__pycache__/attach_script.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_always_live_program.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_always_live_program.py new file mode 120000 index 0000000..3382181 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_always_live_program.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/23/c0/9b/ab64d1d387deae13750cc76cf5a8054ea17c4524fa3f0e9d883147afaa \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_check.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_check.py new file mode 100644 index 0000000..2dbeafe --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_check.py @@ -0,0 +1,2 @@ +import add_code_to_python_process +print(add_code_to_python_process.run_python_code(3736, "print(20)", connect_debugger_tracing=False)) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process.py new file mode 120000 index 0000000..4f8c4df --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/90/cf/4b/160cd7fea4287c36ff5f48583126fc41fa206be7b1281967837fff299c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py new file mode 120000 index 0000000..218a195 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/01/32/fc/5af35c18f8b7c131ed4d02f6dd484ac676ed1bd770077313644882fb61 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py new file mode 100644 index 0000000..462feae --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py @@ -0,0 +1,606 @@ +r''' +Copyright: Brainwy Software Ltda. + +License: EPL. +============= + +Works for Windows by using an executable that'll inject a dll to a process and call a function. + +Note: https://github.com/fabioz/winappdbg is used just to determine if the target process is 32 or 64 bits. + +Works for Linux relying on gdb. 
+ +Limitations: +============ + + Linux: + ------ + + 1. It possible that ptrace is disabled: /etc/sysctl.d/10-ptrace.conf + + Note that even enabling it in /etc/sysctl.d/10-ptrace.conf (i.e.: making the + ptrace_scope=0), it's possible that we need to run the application that'll use ptrace (or + gdb in this case) as root (so, we must sudo the python which'll run this module). + + 2. It currently doesn't work in debug builds (i.e.: python_d) + + +Other implementations: +- pyrasite.com: + GPL + Windows/linux (in Linux it also uses gdb to connect -- although specifics are different as we use a dll to execute + code with other threads stopped). It's Windows approach is more limited because it doesn't seem to deal properly with + Python 3 if threading is disabled. + +- https://github.com/google/pyringe: + Apache v2. + Only linux/Python 2. + +- http://pytools.codeplex.com: + Apache V2 + Windows Only (but supports mixed mode debugging) + Our own code relies heavily on a part of it: http://pytools.codeplex.com/SourceControl/latest#Python/Product/PyDebugAttach/PyDebugAttach.cpp + to overcome some limitations of attaching and running code in the target python executable on Python 3. + See: attach.cpp + +Linux: References if we wanted to use a pure-python debugger: + https://bitbucket.org/haypo/python-ptrace/ + http://stackoverflow.com/questions/7841573/how-to-get-an-error-message-for-errno-value-in-python + Jugaad: + https://www.defcon.org/images/defcon-19/dc-19-presentations/Jakhar/DEFCON-19-Jakhar-Jugaad-Linux-Thread-Injection.pdf + https://github.com/aseemjakhar/jugaad + +Something else (general and not Python related): +- http://www.codeproject.com/Articles/4610/Three-Ways-to-Inject-Your-Code-into-Another-Proces + +Other references: +- https://github.com/haypo/faulthandler +- http://nedbatchelder.com/text/trace-function.html +- https://github.com/python-git/python/blob/master/Python/sysmodule.c (sys_settrace) +- https://github.com/python-git/python/blob/master/Python/ceval.c (PyEval_SetTrace) +- https://github.com/python-git/python/blob/master/Python/thread.c (PyThread_get_key_value) + + +To build the dlls needed on windows, visual studio express 13 was used (see compile_dll.bat) + +See: attach_pydevd.py to attach the pydev debugger to a running python process. +''' + +# Note: to work with nasm compiling asm to code and decompiling to see asm with shellcode: +# x:\nasm\nasm-2.07-win32\nasm-2.07\nasm.exe +# nasm.asm&x:\nasm\nasm-2.07-win32\nasm-2.07\ndisasm.exe -b arch nasm +import ctypes +import os +import struct +import subprocess +import sys +import time +from contextlib import contextmanager +import platform +import traceback + +try: + TimeoutError = TimeoutError # @ReservedAssignment +except NameError: + + class TimeoutError(RuntimeError): # @ReservedAssignment + pass + + +@contextmanager +def _create_win_event(name): + from winappdbg.win32.kernel32 import CreateEventA, WaitForSingleObject, CloseHandle + + manual_reset = False # i.e.: after someone waits it, automatically set to False. 
+ + initial_state = False + if not isinstance(name, bytes): + name = name.encode('utf-8') + event = CreateEventA(None, manual_reset, initial_state, name) + if not event: + raise ctypes.WinError() + + class _WinEvent(object): + + def wait_for_event_set(self, timeout=None): + ''' + :param timeout: in seconds + ''' + if timeout is None: + timeout = 0xFFFFFFFF + else: + timeout = int(timeout * 1000) + ret = WaitForSingleObject(event, timeout) + if ret in (0, 0x80): + return True + elif ret == 0x102: + # Timed out + return False + else: + raise ctypes.WinError() + + try: + yield _WinEvent() + finally: + CloseHandle(event) + + +IS_WINDOWS = sys.platform == 'win32' +IS_LINUX = sys.platform in ('linux', 'linux2') +IS_MAC = sys.platform == 'darwin' + + +def is_python_64bit(): + return (struct.calcsize('P') == 8) + + +def get_target_filename(is_target_process_64=None, prefix=None, extension=None): + # Note: we have an independent (and similar -- but not equal) version of this method in + # `pydevd_tracing.py` which should be kept synchronized with this one (we do a copy + # because the `pydevd_attach_to_process` is mostly independent and shouldn't be imported in the + # debugger -- the only situation where it's imported is if the user actually does an attach to + # process, through `attach_pydevd.py`, but this should usually be called from the IDE directly + # and not from the debugger). + libdir = os.path.dirname(__file__) + + if is_target_process_64 is None: + if IS_WINDOWS: + # i.e.: On windows the target process could have a different bitness (32bit is emulated on 64bit). + raise AssertionError("On windows it's expected that the target bitness is specified.") + + # For other platforms, just use the same bitness of the process we're running in. + is_target_process_64 = is_python_64bit() + + arch = '' + if IS_WINDOWS: + # prefer not using platform.machine() when possible (it's a bit heavyweight as it may + # spawn a subprocess). + arch = os.environ.get("PROCESSOR_ARCHITEW6432", os.environ.get('PROCESSOR_ARCHITECTURE', '')) + + if not arch: + arch = platform.machine() + if not arch: + print('platform.machine() did not return valid value.') # This shouldn't happen... + return None + + if IS_WINDOWS: + if not extension: + extension = '.dll' + suffix_64 = 'amd64' + suffix_32 = 'x86' + + elif IS_LINUX: + if not extension: + extension = '.so' + suffix_64 = 'amd64' + suffix_32 = 'x86' + + elif IS_MAC: + if not extension: + extension = '.dylib' + suffix_64 = 'x86_64' + suffix_32 = 'x86' + + else: + print('Unable to attach to process in platform: %s', sys.platform) + return None + + if arch.lower() not in ('amd64', 'x86', 'x86_64', 'i386', 'x86'): + # We don't support this processor by default. Still, let's support the case where the + # user manually compiled it himself with some heuristics. + # + # Ideally the user would provide a library in the format: "attach_<arch><extension>." 
+ # based on the way it's currently compiled -- see: + # - windows/compile_windows.bat + # - linux_and_mac/compile_linux.sh + # - linux_and_mac/compile_mac.sh + + try: + found = [name for name in os.listdir(libdir) if name.startswith('attach_') and name.endswith(extension)] + except: + print('Error listing dir: %s' % (libdir,)) + traceback.print_exc() + return None + + if prefix: + expected_name = prefix + arch + extension + expected_name_linux = prefix + 'linux_' + arch + extension + else: + # Default is looking for the attach_ / attach_linux + expected_name = 'attach_' + arch + extension + expected_name_linux = 'attach_linux_' + arch + extension + + filename = None + if expected_name in found: # Heuristic: user compiled with "attach_." + filename = os.path.join(libdir, expected_name) + + elif IS_LINUX and expected_name_linux in found: # Heuristic: user compiled with "attach_linux_." + filename = os.path.join(libdir, expected_name_linux) + + elif len(found) == 1: # Heuristic: user removed all libraries and just left his own lib. + filename = os.path.join(libdir, found[0]) + + else: # Heuristic: there's one additional library which doesn't seem to be our own. Find the odd one. + filtered = [name for name in found if not name.endswith((suffix_64 + extension, suffix_32 + extension))] + if len(filtered) == 1: # If more than one is available we can't be sure... + filename = os.path.join(libdir, found[0]) + + if filename is None: + print( + 'Unable to attach to process in arch: %s (did not find %s in %s).' % ( + arch, expected_name, libdir + ) + ) + return None + + print('Using %s in arch: %s.' % (filename, arch)) + + else: + if is_target_process_64: + suffix = suffix_64 + else: + suffix = suffix_32 + + if not prefix: + # Default is looking for the attach_ / attach_linux + if IS_WINDOWS or IS_MAC: # just the extension changes + prefix = 'attach_' + elif IS_LINUX: + prefix = 'attach_linux_' # historically it has a different name + else: + print('Unable to attach to process in platform: %s' % (sys.platform,)) + return None + + filename = os.path.join(libdir, '%s%s%s' % (prefix, suffix, extension)) + + if not os.path.exists(filename): + print('Expected: %s to exist.' % (filename,)) + return None + + return filename + + +def run_python_code_windows(pid, python_code, connect_debugger_tracing=False, show_debug_info=0): + assert '\'' not in python_code, 'Having a single quote messes with our command.' + from winappdbg.process import Process + if not isinstance(python_code, bytes): + python_code = python_code.encode('utf-8') + + process = Process(pid) + bits = process.get_bits() + is_target_process_64 = bits == 64 + + # Note: this restriction no longer applies (we create a process with the proper bitness from + # this process so that the attach works). 
+ # if is_target_process_64 != is_python_64bit(): + # raise RuntimeError("The architecture of the Python used to connect doesn't match the architecture of the target.\n" + # "Target 64 bits: %s\n" + # "Current Python 64 bits: %s" % (is_target_process_64, is_python_64bit())) + + with _acquire_mutex('_pydevd_pid_attach_mutex_%s' % (pid,), 10): + print('--- Connecting to %s bits target (current process is: %s) ---' % (bits, 64 if is_python_64bit() else 32)) + + with _win_write_to_shared_named_memory(python_code, pid): + + target_executable = get_target_filename(is_target_process_64, 'inject_dll_', '.exe') + if not target_executable: + raise RuntimeError('Could not find expected .exe file to inject dll in attach to process.') + + target_dll = get_target_filename(is_target_process_64) + if not target_dll: + raise RuntimeError('Could not find expected .dll file in attach to process.') + + print('\n--- Injecting attach dll: %s into pid: %s ---' % (os.path.basename(target_dll), pid)) + args = [target_executable, str(pid), target_dll] + subprocess.check_call(args) + + # Now, if the first injection worked, go on to the second which will actually + # run the code. + target_dll_run_on_dllmain = get_target_filename(is_target_process_64, 'run_code_on_dllmain_', '.dll') + if not target_dll_run_on_dllmain: + raise RuntimeError('Could not find expected .dll in attach to process.') + + with _create_win_event('_pydevd_pid_event_%s' % (pid,)) as event: + print('\n--- Injecting run code dll: %s into pid: %s ---' % (os.path.basename(target_dll_run_on_dllmain), pid)) + args = [target_executable, str(pid), target_dll_run_on_dllmain] + subprocess.check_call(args) + + if not event.wait_for_event_set(10): + print('Timeout error: the attach may not have completed.') + print('--- Finished dll injection ---\n') + + return 0 + + +@contextmanager +def _acquire_mutex(mutex_name, timeout): + ''' + Only one process may be attaching to a pid, so, create a system mutex + to make sure this holds in practice. + ''' + from winappdbg.win32.kernel32 import CreateMutex, GetLastError, CloseHandle + from winappdbg.win32.defines import ERROR_ALREADY_EXISTS + + initial_time = time.time() + while True: + mutex = CreateMutex(None, True, mutex_name) + acquired = GetLastError() != ERROR_ALREADY_EXISTS + if acquired: + break + if time.time() - initial_time > timeout: + raise TimeoutError('Unable to acquire mutex to make attach before timeout.') + time.sleep(.2) + + try: + yield + finally: + CloseHandle(mutex) + + +@contextmanager +def _win_write_to_shared_named_memory(python_code, pid): + # Use the definitions from winappdbg when possible. + from winappdbg.win32 import defines + from winappdbg.win32.kernel32 import ( + CreateFileMapping, + MapViewOfFile, + CloseHandle, + UnmapViewOfFile, + ) + + memmove = ctypes.cdll.msvcrt.memmove + memmove.argtypes = [ + ctypes.c_void_p, + ctypes.c_void_p, + defines.SIZE_T, + ] + memmove.restype = ctypes.c_void_p + + # Note: BUFSIZE must be the same from run_code_in_memory.hpp + BUFSIZE = 2048 + assert isinstance(python_code, bytes) + assert len(python_code) > 0, 'Python code must not be empty.' + # Note: -1 so that we're sure we'll add a \0 to the end. 
+ assert len(python_code) < BUFSIZE - 1, 'Python code must have at most %s bytes (found: %s)' % (BUFSIZE - 1, len(python_code)) + + python_code += b'\0' * (BUFSIZE - len(python_code)) + assert python_code.endswith(b'\0') + + INVALID_HANDLE_VALUE = -1 + PAGE_READWRITE = 0x4 + FILE_MAP_WRITE = 0x2 + filemap = CreateFileMapping( + INVALID_HANDLE_VALUE, 0, PAGE_READWRITE, 0, BUFSIZE, u"__pydevd_pid_code_to_run__%s" % (pid,)) + + if filemap == INVALID_HANDLE_VALUE or filemap is None: + raise Exception("Failed to create named file mapping (ctypes: CreateFileMapping): %s" % (filemap,)) + try: + view = MapViewOfFile(filemap, FILE_MAP_WRITE, 0, 0, 0) + if not view: + raise Exception("Failed to create view of named file mapping (ctypes: MapViewOfFile).") + + try: + memmove(view, python_code, BUFSIZE) + yield + finally: + UnmapViewOfFile(view) + finally: + CloseHandle(filemap) + + +def run_python_code_linux(pid, python_code, connect_debugger_tracing=False, show_debug_info=0): + assert '\'' not in python_code, 'Having a single quote messes with our command.' + + target_dll = get_target_filename() + if not target_dll: + raise RuntimeError('Could not find .so for attach to process.') + target_dll_name = os.path.splitext(os.path.basename(target_dll))[0] + + # Note: we currently don't support debug builds + is_debug = 0 + # Note that the space in the beginning of each line in the multi-line is important! + cmd = [ + 'gdb', + '--nw', # no gui interface + '--nh', # no ~/.gdbinit + '--nx', # no .gdbinit +# '--quiet', # no version number on startup + '--pid', + str(pid), + '--batch', +# '--batch-silent', + ] + + # PYDEVD_GDB_SCAN_SHARED_LIBRARIES can be a list of strings with the shared libraries + # which should be scanned by default to make the attach to process (i.e.: libdl, libltdl, libc, libfreebl3). + # + # The default is scanning all shared libraries, but on some cases this can be in the 20-30 + # seconds range for some corner cases. + # See: https://github.com/JetBrains/intellij-community/pull/1608 + # + # By setting PYDEVD_GDB_SCAN_SHARED_LIBRARIES (to a comma-separated string), it's possible to + # specify just a few libraries to be loaded (not many are needed for the attach, + # but it can be tricky to pre-specify for all Linux versions as this may change + # across different versions). + # + # See: https://github.com/microsoft/debugpy/issues/762#issuecomment-947103844 + # for a comment that explains the basic steps on how to discover what should be available + # in each case (mostly trying different versions based on the output of gdb). + # + # The upside is that for cases when too many libraries are loaded the attach could be slower + # and just specifying the one that is actually needed for the attach can make it much faster. + # + # The downside is that it may be dependent on the Linux version being attached to (which is the + # reason why this is no longer done by default -- see: https://github.com/microsoft/debugpy/issues/882). + gdb_load_shared_libraries = os.environ.get('PYDEVD_GDB_SCAN_SHARED_LIBRARIES', '').strip() + if gdb_load_shared_libraries: + cmd.extend(["--init-eval-command='set auto-solib-add off'"]) # Don't scan all libraries. + + for lib in gdb_load_shared_libraries.split(','): + lib = lib.strip() + cmd.extend(["--eval-command='sharedlibrary %s'" % (lib,)]) # Scan the specified library + + cmd.extend(["--eval-command='set scheduler-locking off'"]) # If on we'll deadlock. 
+ + # Leave auto by default (it should do the right thing as we're attaching to a process in the + # current host). + cmd.extend(["--eval-command='set architecture auto'"]) + + cmd.extend([ + "--eval-command='call (void*)dlopen(\"%s\", 2)'" % target_dll, + "--eval-command='sharedlibrary %s'" % target_dll_name, + "--eval-command='call (int)DoAttach(%s, \"%s\", %s)'" % ( + is_debug, python_code, show_debug_info) + ]) + + # print ' '.join(cmd) + + env = os.environ.copy() + # Remove the PYTHONPATH (if gdb has a builtin Python it could fail if we + # have the PYTHONPATH for a different python version or some forced encoding). + env.pop('PYTHONIOENCODING', None) + env.pop('PYTHONPATH', None) + print('Running: %s' % (' '.join(cmd))) + p = subprocess.Popen( + ' '.join(cmd), + shell=True, + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + print('Running gdb in target process.') + out, err = p.communicate() + print('stdout: %s' % (out,)) + print('stderr: %s' % (err,)) + return out, err + + +def find_helper_script(filedir, script_name): + target_filename = os.path.join(filedir, 'linux_and_mac', script_name) + target_filename = os.path.normpath(target_filename) + if not os.path.exists(target_filename): + raise RuntimeError('Could not find helper script: %s' % target_filename) + + return target_filename + + +def run_python_code_mac(pid, python_code, connect_debugger_tracing=False, show_debug_info=0): + assert '\'' not in python_code, 'Having a single quote messes with our command.' + + target_dll = get_target_filename() + if not target_dll: + raise RuntimeError('Could not find .dylib for attach to process.') + + libdir = os.path.dirname(__file__) + lldb_prepare_file = find_helper_script(libdir, 'lldb_prepare.py') + # Note: we currently don't support debug builds + + is_debug = 0 + # Note that the space in the beginning of each line in the multi-line is important! + cmd = [ + 'lldb', + '--no-lldbinit', # Do not automatically parse any '.lldbinit' files. + # '--attach-pid', + # str(pid), + # '--arch', + # arch, + '--script-language', + 'Python' + # '--batch-silent', + ] + + cmd.extend([ + "-o 'process attach --pid %d'" % pid, + "-o 'command script import \"%s\"'" % (lldb_prepare_file,), + "-o 'load_lib_and_attach \"%s\" %s \"%s\" %s'" % (target_dll, + is_debug, python_code, show_debug_info), + ]) + + cmd.extend([ + "-o 'process detach'", + "-o 'script import os; os._exit(1)'", + ]) + + # print ' '.join(cmd) + + env = os.environ.copy() + # Remove the PYTHONPATH (if gdb has a builtin Python it could fail if we + # have the PYTHONPATH for a different python version or some forced encoding). 
+ env.pop('PYTHONIOENCODING', None) + env.pop('PYTHONPATH', None) + print('Running: %s' % (' '.join(cmd))) + p = subprocess.Popen( + ' '.join(cmd), + shell=True, + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + print('Running lldb in target process.') + out, err = p.communicate() + print('stdout: %s' % (out,)) + print('stderr: %s' % (err,)) + return out, err + + +if IS_WINDOWS: + run_python_code = run_python_code_windows +elif IS_MAC: + run_python_code = run_python_code_mac +elif IS_LINUX: + run_python_code = run_python_code_linux +else: + + def run_python_code(*args, **kwargs): + print('Unable to attach to process in platform: %s', sys.platform) + + +def test(): + print('Running with: %s' % (sys.executable,)) + code = ''' +import os, time, sys +print(os.getpid()) +#from threading import Thread +#Thread(target=str).start() +if __name__ == '__main__': + while True: + time.sleep(.5) + sys.stdout.write('.\\n') + sys.stdout.flush() +''' + + p = subprocess.Popen([sys.executable, '-u', '-c', code]) + try: + code = 'print("It worked!")\n' + + # Real code will be something as: + # code = '''import sys;sys.path.append(r'X:\winappdbg-code\examples'); import imported;''' + run_python_code(p.pid, python_code=code) + print('\nRun a 2nd time...\n') + run_python_code(p.pid, python_code=code) + + time.sleep(3) + finally: + p.kill() + + +def main(args): + # Otherwise, assume the first parameter is the pid and anything else is code to be executed + # in the target process. + pid = int(args[0]) + del args[0] + python_code = ';'.join(args) + + # Note: on Linux the python code may not have a single quote char: ' + run_python_code(pid, python_code) + + +if __name__ == '__main__': + args = sys.argv[1:] + if not args: + print('Expected pid and Python code to execute in target process.') + else: + if '--test' == args[0]: + test() + else: + main(args) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_linux_amd64.so b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_linux_amd64.so new file mode 120000 index 0000000..eefee5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_linux_amd64.so @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/21/b1/15/c508fe146dc8f92ebc8d48552349e163ffc17ea10aad62fd90cc78ca22 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_pydevd.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_pydevd.py new file mode 120000 index 0000000..bd76409 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_pydevd.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/10/72/b5/bd8d11dfa03e6597a7eaa3597ae7251be100d9facb8f51463a828baa7a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_script.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_script.py new file mode 100644 index 0000000..c8ad003 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_script.py @@ -0,0 +1,186 @@ + + +def get_main_thread_instance(threading): + if hasattr(threading, 'main_thread'): + return threading.main_thread() + else: + # On Python 2 we don't really have an API to get the main thread, + # so, we just get it from 
the 'shutdown' bound method. + return threading._shutdown.im_self + + +def get_main_thread_id(unlikely_thread_id=None): + ''' + :param unlikely_thread_id: + Pass to mark some thread id as not likely the main thread. + + :return tuple(thread_id, critical_warning) + ''' + import sys + import os + + current_frames = sys._current_frames() + possible_thread_ids = [] + for thread_ident, frame in current_frames.items(): + while frame.f_back is not None: + frame = frame.f_back + + basename = os.path.basename(frame.f_code.co_filename) + if basename.endswith(('.pyc', '.pyo')): + basename = basename[:-1] + + if (frame.f_code.co_name, basename) in [ + ('_run_module_as_main', 'runpy.py'), + ('_run_module_as_main', '<frozen runpy>'), + ('run_module_as_main', 'runpy.py'), + ('run_module', 'runpy.py'), + ('run_path', 'runpy.py'), + ]: + # This is the case for python -m (this is an ideal match, so, + # let's return it). + return thread_ident, '' + + if frame.f_code.co_name == '<module>': + if frame.f_globals.get('__name__') == '__main__': + possible_thread_ids.insert(0, thread_ident) # Add with higher priority + continue + + # Usually the main thread will be started in the <module>, whereas others would + # be started in another place (but when Python is embedded, this may not be + # correct, so, just add to the available possibilities as we'll have to choose + # one if there are multiple). + possible_thread_ids.append(thread_ident) + + if len(possible_thread_ids) > 0: + if len(possible_thread_ids) == 1: + return possible_thread_ids[0], '' # Ideal: only one match + + while unlikely_thread_id in possible_thread_ids: + possible_thread_ids.remove(unlikely_thread_id) + + if len(possible_thread_ids) == 1: + return possible_thread_ids[0], '' # Ideal: only one match + + elif len(possible_thread_ids) > 1: + # Bad: we can't really be certain of anything at this point. + return possible_thread_ids[0], \ + 'Multiple thread ids found (%s). Choosing main thread id randomly (%s).' % ( + possible_thread_ids, possible_thread_ids[0]) + + # If we got here we couldn't discover the main thread id. + return None, 'Unable to discover main thread id.' + + +def fix_main_thread_id(on_warn=lambda msg:None, on_exception=lambda msg:None, on_critical=lambda msg:None): + # This means that we weren't able to import threading in the main thread (which most + # likely means that the main thread is paused or in some very long operation). + # In this case we'll import threading here and hotfix what may be wrong in the threading + # module (if we're on Windows where we create a thread to do the attach and on Linux + # we are not certain on which thread we're executing this code). + # + # The code below is a workaround for https://bugs.python.org/issue37416 + import sys + import threading + + try: + with threading._active_limbo_lock: + main_thread_instance = get_main_thread_instance(threading) + + if sys.platform == 'win32': + # On windows this code would be called in a secondary thread, so, + # the current thread is unlikely to be the main thread. 
+ if hasattr(threading, '_get_ident'): + unlikely_thread_id = threading._get_ident() # py2 + else: + unlikely_thread_id = threading.get_ident() # py3 + else: + unlikely_thread_id = None + + main_thread_id, critical_warning = get_main_thread_id(unlikely_thread_id) + + if main_thread_id is not None: + main_thread_id_attr = '_ident' + if not hasattr(main_thread_instance, main_thread_id_attr): + main_thread_id_attr = '_Thread__ident' + assert hasattr(main_thread_instance, main_thread_id_attr) + + if main_thread_id != getattr(main_thread_instance, main_thread_id_attr): + # Note that we also have to reset the '_tstack_lock' for a regular lock. + # This is needed to avoid an error on shutdown because this lock is bound + # to the thread state and will be released when the secondary thread + # that initialized the lock is finished -- making an assert fail during + # process shutdown. + main_thread_instance._tstate_lock = threading._allocate_lock() + main_thread_instance._tstate_lock.acquire() + + # Actually patch the thread ident as well as the threading._active dict + # (we should have the _active_limbo_lock to do that). + threading._active.pop(getattr(main_thread_instance, main_thread_id_attr), None) + setattr(main_thread_instance, main_thread_id_attr, main_thread_id) + threading._active[getattr(main_thread_instance, main_thread_id_attr)] = main_thread_instance + + # Note: only import from pydevd after the patching is done (we want to do the minimum + # possible when doing that patching). + on_warn('The threading module was not imported by user code in the main thread. The debugger will attempt to work around https://bugs.python.org/issue37416.') + + if critical_warning: + on_critical('Issue found when debugger was trying to work around https://bugs.python.org/issue37416:\n%s' % (critical_warning,)) + except: + on_exception('Error patching main thread id.') + + +def attach(port, host, protocol=''): + try: + import sys + fix_main_thread = 'threading' not in sys.modules + + if fix_main_thread: + + def on_warn(msg): + from _pydev_bundle import pydev_log + pydev_log.warn(msg) + + def on_exception(msg): + from _pydev_bundle import pydev_log + pydev_log.exception(msg) + + def on_critical(msg): + from _pydev_bundle import pydev_log + pydev_log.critical(msg) + + fix_main_thread_id(on_warn=on_warn, on_exception=on_exception, on_critical=on_critical) + + else: + from _pydev_bundle import pydev_log # @Reimport + pydev_log.debug('The threading module is already imported by user code.') + + if protocol: + from _pydevd_bundle import pydevd_defaults + pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = protocol + + import pydevd + + # I.e.: disconnect/reset if already connected. 
+ + pydevd.SetupHolder.setup = None + + py_db = pydevd.get_global_debugger() + if py_db is not None: + py_db.dispose_and_kill_all_pydevd_threads(wait=False) + + # pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True + # pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3 + # pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3 + pydevd.settrace( + port=port, + host=host, + stdoutToServer=True, + stderrToServer=True, + overwrite_prev_trace=True, + suspend=False, + trace_only_current_thread=False, + patch_multiprocessing=False, + ) + except: + import traceback + traceback.print_exc() diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace.hpp new file mode 100644 index 0000000..510f916 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace.hpp @@ -0,0 +1,193 @@ +#ifndef _PY_CUSTOM_PYEVAL_SETTRACE_HPP_ +#define _PY_CUSTOM_PYEVAL_SETTRACE_HPP_ + +#include "python.h" +#include "py_utils.hpp" +#include "py_custom_pyeval_settrace_common.hpp" +#include "py_custom_pyeval_settrace_310.hpp" +#include "py_custom_pyeval_settrace_311.hpp" + +// On Python 3.7 onwards the thread state is not kept in PyThread_set_key_value (rather +// it uses PyThread_tss_set using PyThread_tss_set(&_PyRuntime.gilstate.autoTSSkey, (void *)tstate) +// and we don't have access to that key from here (thus, we can't use the previous approach which +// made CPython think that the current thread had the thread state where we wanted to set the tracing). +// +// So, the solution implemented here is not faking that change and reimplementing PyEval_SetTrace. +// The implementation is mostly the same from the one in CPython, but we have one shortcoming: +// +// When CPython sets the tracing for a thread it increments _Py_TracingPossible (actually +// _PyRuntime.ceval.tracing_possible). This implementation has one issue: it only works on +// deltas when the tracing is set (so, a settrace(func) will increase the _Py_TracingPossible global value and a +// settrace(None) will decrease it, but when a thread dies it's kept as is and is not decreased). +// -- as we don't currently have access to _PyRuntime we have to create a thread, set the tracing +// and let it die so that the count is increased (this is really hacky, but better than having +// to create a local copy of the whole _PyRuntime (defined in pystate.h with several inner structs) +// which would need to be kept up to date for each new CPython version just to increment that variable). + + + +/** + * This version is used in internalInitializeCustomPyEvalSetTrace->pyObject_FastCallDict on older + * versions of CPython (pre 3.7). 
+ */ + static PyObject * + PyObject_FastCallDictCustom(PyObject* callback, PyObject *stack[3], int ignoredStackSizeAlways3, void* ignored) + { + PyObject *args = internalInitializeCustomPyEvalSetTrace->pyTuple_New(3); + PyObject *result; + + if (args == NULL) { + return NULL; + } + + IncRef(stack[0]); + IncRef(stack[1]); + IncRef(stack[2]); + + // I.e.: same thing as: PyTuple_SET_ITEM(args, 0, stack[0]); + reinterpret_cast(args)->ob_item[0] = stack[0]; + reinterpret_cast(args)->ob_item[1] = stack[1]; + reinterpret_cast(args)->ob_item[2] = stack[2]; + + /* call the Python-level function */ + result = internalInitializeCustomPyEvalSetTrace->pyEval_CallObjectWithKeywords(callback, args, (PyObject*)NULL); + + /* cleanup */ + DecRef(args, internalInitializeCustomPyEvalSetTrace->isDebug); + return result; +} + +static PyObject * +InternalCallTrampoline(PyObject* callback, + PyFrameObjectBaseUpTo39 *frame, int what, PyObject *arg) +{ + PyObject *result; + PyObject *stack[3]; + +// Note: this is commented out from CPython (we shouldn't need it and it adds a reasonable overhead). +// if (PyFrame_FastToLocalsWithError(frame) < 0) { +// return NULL; +// } +// + stack[0] = (PyObject *)frame; + stack[1] = InternalWhatstrings_37[what]; + stack[2] = (arg != NULL) ? arg : internalInitializeCustomPyEvalSetTrace->pyNone; + + + // Helpers to print info. + // printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)stack[0]))); + // printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)stack[1]))); + // printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)stack[2]))); + // printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)callback))); + + // call the Python-level function + // result = _PyObject_FastCall(callback, stack, 3); + // + // Note that _PyObject_FastCall is actually a define: + // #define _PyObject_FastCall(func, args, nargs) _PyObject_FastCallDict((func), (args), (nargs), NULL) + + result = internalInitializeCustomPyEvalSetTrace->pyObject_FastCallDict(callback, stack, 3, NULL); + + +// Note: this is commented out from CPython (we shouldn't need it and it adds a reasonable overhead). +// PyFrame_LocalsToFast(frame, 1); + + if (result == NULL) { + internalInitializeCustomPyEvalSetTrace->pyTraceBack_Here(frame); + } + + return result; +} + +static int +InternalTraceTrampoline(PyObject *self, PyFrameObject *frameParam, + int what, PyObject *arg) +{ + PyObject *callback; + PyObject *result; + + PyFrameObjectBaseUpTo39 *frame = reinterpret_cast(frameParam); + + if (what == PyTrace_CALL){ + callback = self; + } else { + callback = frame->f_trace; + } + + if (callback == NULL){ + return 0; + } + + result = InternalCallTrampoline(callback, frame, what, arg); + if (result == NULL) { + // Note: calling the original sys.settrace here. 
+ internalInitializeCustomPyEvalSetTrace->pyEval_SetTrace(NULL, NULL); + PyObject *temp_f_trace = frame->f_trace; + frame->f_trace = NULL; + if(temp_f_trace != NULL){ + DecRef(temp_f_trace, internalInitializeCustomPyEvalSetTrace->isDebug); + } + return -1; + } + if (result != internalInitializeCustomPyEvalSetTrace->pyNone) { + PyObject *tmp = frame->f_trace; + frame->f_trace = result; + DecRef(tmp, internalInitializeCustomPyEvalSetTrace->isDebug); + } + else { + DecRef(result, internalInitializeCustomPyEvalSetTrace->isDebug); + } + return 0; +} + +// Based on ceval.c (PyEval_SetTrace(Py_tracefunc func, PyObject *arg)) +template +void InternalPySetTrace_Template(T tstate, PyObjectHolder* traceFunc, bool isDebug) +{ + PyObject *temp = tstate->c_traceobj; + + // We can't increase _Py_TracingPossible. Everything else should be equal to CPython. + // runtime->ceval.tracing_possible += (func != NULL) - (tstate->c_tracefunc != NULL); + + PyObject *arg = traceFunc->ToPython(); + IncRef(arg); + tstate->c_tracefunc = NULL; + tstate->c_traceobj = NULL; + /* Must make sure that profiling is not ignored if 'temp' is freed */ + tstate->use_tracing = tstate->c_profilefunc != NULL; + if(temp != NULL){ + DecRef(temp, isDebug); + } + tstate->c_tracefunc = InternalTraceTrampoline; + tstate->c_traceobj = arg; + /* Flag that tracing or profiling is turned on */ + tstate->use_tracing = ((InternalTraceTrampoline != NULL) + || (tstate->c_profilefunc != NULL)); + +}; + + +void InternalPySetTrace(PyThreadState* curThread, PyObjectHolder* traceFunc, bool isDebug, PythonVersion version) +{ + if (PyThreadState_25_27::IsFor(version)) { + InternalPySetTrace_Template(reinterpret_cast(curThread), traceFunc, isDebug); + } else if (PyThreadState_30_33::IsFor(version)) { + InternalPySetTrace_Template(reinterpret_cast(curThread), traceFunc, isDebug); + } else if (PyThreadState_34_36::IsFor(version)) { + InternalPySetTrace_Template(reinterpret_cast(curThread), traceFunc, isDebug); + } else if (PyThreadState_37_38::IsFor(version)) { + InternalPySetTrace_Template(reinterpret_cast(curThread), traceFunc, isDebug); + } else if (PyThreadState_39::IsFor(version)) { + InternalPySetTrace_Template(reinterpret_cast(curThread), traceFunc, isDebug); + } else if (PyThreadState_310::IsFor(version)) { + // 3.10 has other changes on the actual algorithm (use_tracing is per-frame now), so, we have a full new version for it. 
+ InternalPySetTrace_Template310(reinterpret_cast(curThread), traceFunc, isDebug); + } else if (PyThreadState_311::IsFor(version)) { + InternalPySetTrace_Template311(reinterpret_cast(curThread), traceFunc, isDebug); + } else { + printf("Unable to set trace to target thread with Python version: %d", version); + } +} + + +#endif //_PY_CUSTOM_PYEVAL_SETTRACE_HPP_ \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_310.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_310.hpp new file mode 120000 index 0000000..a3d2cf4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_310.hpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/98/da/fa/2d52cf0e8091cb12cf4dd61bd095835d27d19fbc6e0333cecd95afa05b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_311.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_311.hpp new file mode 100644 index 0000000..d3086ad --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_311.hpp @@ -0,0 +1,120 @@ +#ifndef _PY_CUSTOM_PYEVAL_SETTRACE_311_HPP_ +#define _PY_CUSTOM_PYEVAL_SETTRACE_311_HPP_ + +#include "python.h" +#include "py_utils.hpp" + +static PyObject * +InternalCallTrampoline311(PyObject* callback, + PyFrameObject311 *frame, int what, PyObject *arg) +{ + PyObject *result; + PyObject *stack[3]; + +// Note: this is commented out from CPython (we shouldn't need it and it adds a reasonable overhead). +// if (PyFrame_FastToLocalsWithError(frame) < 0) { +// return NULL; +// } +// + stack[0] = (PyObject *)frame; + stack[1] = InternalWhatstrings_37[what]; + stack[2] = (arg != NULL) ? arg : internalInitializeCustomPyEvalSetTrace->pyNone; + + + // Helper to print info. + //printf("--- start\n"); + //printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)stack[0]))); + //printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)stack[1]))); + //printf("%s\n", internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8(internalInitializeCustomPyEvalSetTrace->pyObject_Repr((PyObject *)stack[2]))); + //printf("--- end\n"); + + result = internalInitializeCustomPyEvalSetTrace->pyObject_FastCallDict(callback, stack, 3, NULL); + +// Note: this is commented out from CPython (we shouldn't need it and it adds a reasonable overhead). 
+// PyFrame_LocalsToFast(frame, 1); + + if (result == NULL) { + internalInitializeCustomPyEvalSetTrace->pyTraceBack_Here(frame); + } + + return result; +} + +// See: static int trace_trampoline(PyObject *self, PyFrameObject *frame, int what, PyObject *arg) +// in: https://github.com/python/cpython/blob/3.11/Python/sysmodule.c +static int +InternalTraceTrampoline311(PyObject *self, PyFrameObject *frameParam, + int what, PyObject *arg) +{ + PyObject *callback; + PyObject *result; + + PyFrameObject311 *frame = reinterpret_cast(frameParam); + + if (what == PyTrace_CALL){ + callback = self; + } else { + callback = frame->f_trace; + } + + if (callback == NULL){ + return 0; + } + + result = InternalCallTrampoline311(callback, frame, what, arg); + if (result == NULL) { + // Note: calling the original sys.settrace here. + internalInitializeCustomPyEvalSetTrace->pyEval_SetTrace(NULL, NULL); + PyObject *temp_f_trace = frame->f_trace; + frame->f_trace = NULL; + if(temp_f_trace != NULL){ + DecRef(temp_f_trace, internalInitializeCustomPyEvalSetTrace->isDebug); + } + return -1; + } + if (result != internalInitializeCustomPyEvalSetTrace->pyNone) { + PyObject *tmp = frame->f_trace; + frame->f_trace = result; + DecRef(tmp, internalInitializeCustomPyEvalSetTrace->isDebug); + } + else { + DecRef(result, internalInitializeCustomPyEvalSetTrace->isDebug); + } + return 0; +} + +// Based on ceval.c (PyEval_SetTrace(Py_tracefunc func, PyObject *arg)) +// https://github.com/python/cpython/blob/3.11/Python/ceval.c +template +void InternalPySetTrace_Template311(T tstate, PyObjectHolder* traceFunc, bool isDebug) +{ + PyObject *traceobj = tstate->c_traceobj; + + PyObject *arg = traceFunc->ToPython(); + IncRef(arg); + tstate->c_tracefunc = NULL; + tstate->c_traceobj = NULL; + + // This is different (previously it was just: tstate->use_tracing, now + // this flag is per-frame). + int use_tracing = (tstate->c_profilefunc != NULL); + + // Note: before 3.11 this was just 1 or 0, now it needs to be 255 or 0. + tstate->cframe->use_tracing = (use_tracing ? 255 : 0); + + if(traceobj != NULL){ + DecRef(traceobj, isDebug); + } + tstate->c_tracefunc = InternalTraceTrampoline311; + tstate->c_traceobj = arg; + /* Flag that tracing or profiling is turned on */ + use_tracing = ((InternalTraceTrampoline311 != NULL) + || (tstate->c_profilefunc != NULL)); + + // Note: before 3.11 this was just 1 or 0, now it needs to be 255 or 0. + tstate->cframe->use_tracing = (use_tracing ? 
255 : 0); + +}; + + +#endif //_PY_CUSTOM_PYEVAL_SETTRACE_311_HPP_ \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_common.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_common.hpp new file mode 120000 index 0000000..3253a1b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_common.hpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/af/57/21/e94830178af15bc7a188d9c0bc9135f150a8525d93ba33fb9d30b2d2f4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_settrace.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_settrace.hpp new file mode 100644 index 0000000..ba6c25f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_settrace.hpp @@ -0,0 +1,193 @@ +#ifndef _PY_SETTRACE_HPP_ +#define _PY_SETTRACE_HPP_ + +#include "ref_utils.hpp" +#include "py_utils.hpp" +#include "python.h" +#include "py_custom_pyeval_settrace.hpp" +#include + + +#ifdef _WIN32 + +typedef HMODULE MODULE_TYPE; +#else // LINUX ----------------------------------------------------------------- + +typedef void* MODULE_TYPE; +typedef ssize_t SSIZE_T; +typedef unsigned int DWORD; + +#endif + +DWORD GetPythonThreadId(PythonVersion version, PyThreadState* curThread) { + DWORD threadId = 0; + if (PyThreadState_25_27::IsFor(version)) { + threadId = (DWORD)((PyThreadState_25_27*)curThread)->thread_id; + } else if (PyThreadState_30_33::IsFor(version)) { + threadId = (DWORD)((PyThreadState_30_33*)curThread)->thread_id; + } else if (PyThreadState_34_36::IsFor(version)) { + threadId = (DWORD)((PyThreadState_34_36*)curThread)->thread_id; + } else if (PyThreadState_37_38::IsFor(version)) { + threadId = (DWORD)((PyThreadState_37_38*)curThread)->thread_id; + } else if (PyThreadState_39::IsFor(version)) { + threadId = (DWORD)((PyThreadState_39*)curThread)->thread_id; + } else if (PyThreadState_310::IsFor(version)) { + threadId = (DWORD)((PyThreadState_310*)curThread)->thread_id; + } else if (PyThreadState_311::IsFor(version)) { + threadId = (DWORD)((PyThreadState_311*)curThread)->thread_id; + } + return threadId; +} + + +/** + * This function may be called to set a tracing function to existing python threads. + */ +int InternalSetSysTraceFunc( + MODULE_TYPE module, + bool isDebug, + bool showDebugInfo, + PyObjectHolder* traceFunc, + PyObjectHolder* setTraceFunc, + unsigned int threadId, + PyObjectHolder* pyNone) +{ + + if(showDebugInfo){ + PRINT("InternalSetSysTraceFunc started."); + } + + DEFINE_PROC(isInit, Py_IsInitialized*, "Py_IsInitialized", 100); + if (!isInit()) { + PRINT("Py_IsInitialized returned false."); + return 110; + } + + auto version = GetPythonVersion(module); + + // found initialized Python runtime, gather and check the APIs we need. 
+ + DEFINE_PROC(interpHead, PyInterpreterState_Head*, "PyInterpreterState_Head", 120); + DEFINE_PROC(gilEnsure, PyGILState_Ensure*, "PyGILState_Ensure", 130); + DEFINE_PROC(gilRelease, PyGILState_Release*, "PyGILState_Release", 140); + DEFINE_PROC(threadHead, PyInterpreterState_ThreadHead*, "PyInterpreterState_ThreadHead", 150); + DEFINE_PROC(threadNext, PyThreadState_Next*, "PyThreadState_Next", 160); + DEFINE_PROC(threadSwap, PyThreadState_Swap*, "PyThreadState_Swap", 170); + DEFINE_PROC(call, PyObject_CallFunctionObjArgs*, "PyObject_CallFunctionObjArgs", 180); + + PyInt_FromLong* intFromLong; + + if (version >= PythonVersion_30) { + DEFINE_PROC(intFromLongPy3, PyInt_FromLong*, "PyLong_FromLong", 190); + intFromLong = intFromLongPy3; + } else { + DEFINE_PROC(intFromLongPy2, PyInt_FromLong*, "PyInt_FromLong", 200); + intFromLong = intFromLongPy2; + } + + DEFINE_PROC(pyGetAttr, PyObject_GetAttrString*, "PyObject_GetAttrString", 250); + DEFINE_PROC(pyHasAttr, PyObject_HasAttrString*, "PyObject_HasAttrString", 260); + DEFINE_PROC_NO_CHECK(PyCFrame_Type, PyTypeObject*, "PyCFrame_Type", 300); // optional + + DEFINE_PROC_NO_CHECK(curPythonThread, PyThreadState**, "_PyThreadState_Current", 310); // optional + DEFINE_PROC_NO_CHECK(getPythonThread, _PyThreadState_UncheckedGet*, "_PyThreadState_UncheckedGet", 320); // optional + + if (curPythonThread == nullptr && getPythonThread == nullptr) { + // we're missing some APIs, we cannot attach. + PRINT("Error, missing Python threading API!!"); + return 330; + } + + auto head = interpHead(); + if (head == nullptr) { + // this interpreter is loaded but not initialized. + PRINT("Interpreter not initialized!"); + return 340; + } + + GilHolder gilLock(gilEnsure, gilRelease); // acquire and hold the GIL until done... + + int retVal = 0; + // find what index is holding onto the thread state... + auto curPyThread = getPythonThread ? getPythonThread() : *curPythonThread; + + if(curPyThread == nullptr){ + PRINT("Getting the current python thread returned nullptr."); + return 345; + } + + + // We do what PyEval_SetTrace does, but for any target thread. + PyUnicode_InternFromString* pyUnicode_InternFromString; + if (version >= PythonVersion_30) { + DEFINE_PROC(unicodeFromString, PyUnicode_InternFromString*, "PyUnicode_InternFromString", 520); + pyUnicode_InternFromString = unicodeFromString; + } else { + DEFINE_PROC(stringFromString, PyUnicode_InternFromString*, "PyString_InternFromString", 525); + pyUnicode_InternFromString = stringFromString; + } + + DEFINE_PROC_NO_CHECK(pyObject_FastCallDict, _PyObject_FastCallDict*, "_PyObject_FastCallDict", 530); + DEFINE_PROC(pyTuple_New, PyTuple_New*, "PyTuple_New", 531); + DEFINE_PROC(pyEval_CallObjectWithKeywords, PyEval_CallObjectWithKeywords*, "PyEval_CallObjectWithKeywords", 532); + + if(pyObject_FastCallDict == nullptr) { + DEFINE_PROC_NO_CHECK(pyObject_FastCallDict, _PyObject_FastCallDict*, "PyObject_VectorcallDict", 533); + } + + if(pyObject_FastCallDict == nullptr) { + // we have to use PyObject_FastCallDictCustom for older versions of CPython (pre 3.7). + pyObject_FastCallDict = reinterpret_cast<_PyObject_FastCallDict*>(&PyObject_FastCallDictCustom); + } + + + DEFINE_PROC(pyTraceBack_Here, PyTraceBack_Here*, "PyTraceBack_Here", 540); + DEFINE_PROC(pyEval_SetTrace, PyEval_SetTrace*, "PyEval_SetTrace", 550); + + // These are defined mostly for printing info while debugging, so, if they're not there, don't bother reporting. 
+ DEFINE_PROC_NO_CHECK(pyObject_Repr, PyObject_Repr*, "PyObject_Repr", 551); + DEFINE_PROC_NO_CHECK(pyUnicode_AsUTF8, PyUnicode_AsUTF8*, "PyUnicode_AsUTF8", 552); + + + bool found = false; + for (PyThreadState* curThread = threadHead(head); curThread != nullptr; curThread = threadNext(curThread)) { + if (GetPythonThreadId(version, curThread) != threadId) { + continue; + } + found = true; + + if(showDebugInfo){ + printf("setting trace for thread: %d\n", threadId); + } + + if(!InternalIsTraceInitialized()) + { + InternalInitializeCustomPyEvalSetTrace *internalInitializeCustomPyEvalSetTrace = new InternalInitializeCustomPyEvalSetTrace(); + + IncRef(pyNone->ToPython()); + internalInitializeCustomPyEvalSetTrace->pyNone = pyNone->ToPython(); + + internalInitializeCustomPyEvalSetTrace->pyUnicode_InternFromString = pyUnicode_InternFromString; + internalInitializeCustomPyEvalSetTrace->pyObject_FastCallDict = pyObject_FastCallDict; + internalInitializeCustomPyEvalSetTrace->isDebug = isDebug; + internalInitializeCustomPyEvalSetTrace->pyTraceBack_Here = pyTraceBack_Here; + internalInitializeCustomPyEvalSetTrace->pyEval_SetTrace = pyEval_SetTrace; + internalInitializeCustomPyEvalSetTrace->pyTuple_New = pyTuple_New; + internalInitializeCustomPyEvalSetTrace->pyEval_CallObjectWithKeywords = pyEval_CallObjectWithKeywords; + internalInitializeCustomPyEvalSetTrace->pyObject_Repr = pyObject_Repr; + internalInitializeCustomPyEvalSetTrace->pyUnicode_AsUTF8 = pyUnicode_AsUTF8; + + InternalTraceInit(internalInitializeCustomPyEvalSetTrace); + } + InternalPySetTrace(curThread, traceFunc, isDebug, version); + break; + } + if(!found) { + retVal = 501; + } + + return retVal; + +} + +#endif // _PY_SETTRACE_HPP_ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_utils.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_utils.hpp new file mode 100644 index 0000000..97163a4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_utils.hpp @@ -0,0 +1,84 @@ +#ifndef _PY_UTILS_HPP_ +#define _PY_UTILS_HPP_ + +typedef int (Py_IsInitialized)(); +typedef PyInterpreterState* (PyInterpreterState_Head)(); +typedef enum { PyGILState_LOCKED, PyGILState_UNLOCKED } PyGILState_STATE; +typedef PyGILState_STATE(PyGILState_Ensure)(); +typedef void (PyGILState_Release)(PyGILState_STATE); +typedef int (PyRun_SimpleString)(const char *command); +typedef PyThreadState* (PyInterpreterState_ThreadHead)(PyInterpreterState* interp); +typedef PyThreadState* (PyThreadState_Next)(PyThreadState *tstate); +typedef PyThreadState* (PyThreadState_Swap)(PyThreadState *tstate); +typedef PyThreadState* (_PyThreadState_UncheckedGet)(); +typedef PyObject* (PyObject_CallFunctionObjArgs)(PyObject *callable, ...); // call w/ varargs, last arg should be nullptr +typedef PyObject* (PyInt_FromLong)(long); +typedef PyObject* (PyErr_Occurred)(); +typedef void (PyErr_Fetch)(PyObject **ptype, PyObject **pvalue, PyObject **ptraceback); +typedef void (PyErr_Restore)(PyObject *type, PyObject *value, PyObject *traceback); +typedef PyObject* (PyImport_ImportModule) (const char *name); +typedef PyObject* (PyImport_ImportModuleNoBlock) (const char *name); +typedef PyObject* (PyObject_GetAttrString)(PyObject *o, const char *attr_name); +typedef PyObject* (PyObject_HasAttrString)(PyObject *o, const char *attr_name); +typedef void* (PyThread_get_key_value)(int); +typedef int (PyThread_set_key_value)(int, void*); +typedef 
void (PyThread_delete_key_value)(int); +typedef int (PyObject_Not) (PyObject *o); +typedef PyObject* (PyDict_New)(); +typedef PyObject* (PyUnicode_InternFromString)(const char *u); +typedef PyObject * (_PyObject_FastCallDict)( + PyObject *callable, PyObject *const *args, Py_ssize_t nargs, PyObject *kwargs); +typedef int (PyTraceBack_Here)(PyFrameObject *frame); + +typedef PyObject* PyTuple_New(Py_ssize_t len); +typedef PyObject* PyEval_CallObjectWithKeywords(PyObject *callable, PyObject *args, PyObject *kwargs); + +typedef void (PyEval_SetTrace)(Py_tracefunc, PyObject *); +typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *frame, int, PyObject *); +typedef int (_PyEval_SetTrace)(PyThreadState *tstate, Py_tracefunc func, PyObject *arg); + +typedef PyObject* PyObject_Repr(PyObject *); +typedef const char* PyUnicode_AsUTF8(PyObject *unicode); + +// holder to ensure we release the GIL even in error conditions +class GilHolder { + PyGILState_STATE _gilState; + PyGILState_Release* _release; +public: + GilHolder(PyGILState_Ensure* acquire, PyGILState_Release* release) { + _gilState = acquire(); + _release = release; + } + + ~GilHolder() { + _release(_gilState); + } +}; + +#ifdef _WIN32 + +#define PRINT(msg) {std::cout << msg << std::endl << std::flush;} + +#define DEFINE_PROC_NO_CHECK(func, funcType, funcNameStr, errorCode) \ + funcType func=reinterpret_cast(GetProcAddress(module, funcNameStr)); + +#define DEFINE_PROC(func, funcType, funcNameStr, errorCode) \ + DEFINE_PROC_NO_CHECK(func, funcType, funcNameStr, errorCode); \ + if(func == nullptr){std::cout << funcNameStr << " not found." << std::endl << std::flush; return errorCode;}; + +#else // LINUX ----------------------------------------------------------------- + +#define PRINT(msg) {printf(msg); printf("\n");} + +#define CHECK_NULL(ptr, msg, errorCode) if(ptr == nullptr){printf(msg); return errorCode;} + +#define DEFINE_PROC_NO_CHECK(func, funcType, funcNameStr, errorCode) \ + funcType func; *(void**)(&func) = dlsym(module, funcNameStr); + +#define DEFINE_PROC(func, funcType, funcNameStr, errorCode) \ + DEFINE_PROC_NO_CHECK(func, funcType, funcNameStr, errorCode); \ + if(func == nullptr){printf(funcNameStr); printf(" not found.\n"); return errorCode;}; + +#endif //_WIN32 + +#endif //_PY_UTILS_HPP_ \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_version.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_version.hpp new file mode 100644 index 0000000..be73692 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_version.hpp @@ -0,0 +1,90 @@ + +#ifndef __PY_VERSION_HPP__ +#define __PY_VERSION_HPP__ + + +#include + +enum PythonVersion { + PythonVersion_Unknown, + PythonVersion_25 = 0x0205, + PythonVersion_26 = 0x0206, + PythonVersion_27 = 0x0207, + PythonVersion_30 = 0x0300, + PythonVersion_31 = 0x0301, + PythonVersion_32 = 0x0302, + PythonVersion_33 = 0x0303, + PythonVersion_34 = 0x0304, + PythonVersion_35 = 0x0305, + PythonVersion_36 = 0x0306, + PythonVersion_37 = 0x0307, + PythonVersion_38 = 0x0308, + PythonVersion_39 = 0x0309, + PythonVersion_310 = 0x030A, + PythonVersion_311 = 0x030B +}; + + +#ifdef _WIN32 + +typedef const char* (GetVersionFunc)(); + +static PythonVersion GetPythonVersion(HMODULE hMod) { + auto versionFunc = reinterpret_cast(GetProcAddress(hMod, "Py_GetVersion")); + if (versionFunc == nullptr) { + return PythonVersion_Unknown; + 
} + const char* version = versionFunc(); + + +#else // LINUX ----------------------------------------------------------------- + +typedef const char* (*GetVersionFunc) (); + +static PythonVersion GetPythonVersion(void *module) { + GetVersionFunc versionFunc; + *(void**)(&versionFunc) = dlsym(module, "Py_GetVersion"); + if(versionFunc == nullptr) { + return PythonVersion_Unknown; + } + const char* version = versionFunc(); + +#endif //_WIN32 + + if (version != nullptr && strlen(version) >= 3 && version[1] == '.') { + if (version[0] == '2') { + switch (version[2]) { + case '5': return PythonVersion_25; + case '6': return PythonVersion_26; + case '7': return PythonVersion_27; + } + } + else if (version[0] == '3') { + switch (version[2]) { + case '0': return PythonVersion_30; + case '1': + if(strlen(version) >= 4){ + if(version[3] == '0'){ + return PythonVersion_310; + } + if(version[3] == '1'){ + return PythonVersion_311; + } + } + return PythonVersion_Unknown; // we don't care about 3.1 anymore... + + case '2': return PythonVersion_32; + case '3': return PythonVersion_33; + case '4': return PythonVersion_34; + case '5': return PythonVersion_35; + case '6': return PythonVersion_36; + case '7': return PythonVersion_37; + case '8': return PythonVersion_38; + case '9': return PythonVersion_39; + } + } + } + return PythonVersion_Unknown; +} + +#endif \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/python.h b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/python.h new file mode 100644 index 0000000..ce5dc98 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/python.h @@ -0,0 +1,704 @@ +// Python Tools for Visual Studio +// Copyright(c) Microsoft Corporation +// All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the License); you may not use +// this file except in compliance with the License. You may obtain a copy of the +// License at http://www.apache.org/licenses/LICENSE-2.0 +// +// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS +// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY +// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +// MERCHANTABILITY OR NON-INFRINGEMENT. +// +// See the Apache Version 2.0 License for specific language governing +// permissions and limitations under the License. + +#ifndef __PYTHON_H__ +#define __PYTHON_H__ + +#include "../common/py_version.hpp" + +#include + +#ifndef _WIN32 +typedef unsigned int DWORD; +typedef ssize_t SSIZE_T; +#endif +typedef SSIZE_T Py_ssize_t; + +// defines limited header of Python API for compatible access across a number of Pythons. + +class PyTypeObject; +class PyThreadState; + +#define PyObject_HEAD \ + size_t ob_refcnt; \ + PyTypeObject *ob_type; + +#define PyObject_VAR_HEAD \ + PyObject_HEAD \ + size_t ob_size; /* Number of items in variable part */ + +class PyObject { +public: + PyObject_HEAD +}; + +class PyVarObject : public PyObject { +public: + size_t ob_size; /* Number of items in variable part */ +}; + +// 2.5 - 3.7 +class PyFunctionObject : public PyObject { +public: + PyObject *func_code; /* A code object */ +}; + +// 2.5 - 2.7 compatible +class PyStringObject : public PyVarObject { +public: + long ob_shash; + int ob_sstate; + char ob_sval[1]; + + /* Invariants: + * ob_sval contains space for 'ob_size+1' elements. + * ob_sval[ob_size] == 0. 
+ * ob_shash is the hash of the string or -1 if not computed yet. + * ob_sstate != 0 iff the string object is in stringobject.c's + * 'interned' dictionary; in this case the two references + * from 'interned' to this object are *not counted* in ob_refcnt. + */ +}; + +// 2.4 - 3.7 compatible +typedef struct { + PyObject_HEAD + size_t length; /* Length of raw Unicode data in buffer */ + wchar_t *str; /* Raw Unicode buffer */ + long hash; /* Hash value; -1 if not set */ +} PyUnicodeObject; + + +class PyFrameObject : public PyObject { + // After 3.10 we don't really have things we want to reuse common, so, + // create an empty base (it's not based on PyVarObject because on Python 3.11 + // it's just a PyObject and no longer a PyVarObject -- the part related to + // the var object must be declared in ech subclass in this case). +}; + +// 2.4 - 3.7 compatible +class PyFrameObjectBaseUpTo39 : public PyFrameObject { +public: + size_t ob_size; /* Number of items in variable part -- i.e.: PyVarObject*/ + + PyFrameObjectBaseUpTo39 *f_back; /* previous frame, or nullptr */ + PyObject *f_code; /* code segment */ + PyObject *f_builtins; /* builtin symbol table (PyDictObject) */ + PyObject *f_globals; /* global symbol table (PyDictObject) */ + PyObject *f_locals; /* local symbol table (any mapping) */ + PyObject **f_valuestack; /* points after the last local */ + /* Next free slot in f_valuestack. Frame creation sets to f_valuestack. + Frame evaluation usually NULLs it, but a frame that yields sets it + to the current stack top. */ + PyObject **f_stacktop; + PyObject *f_trace; /* Trace function */ + + // It has more things, but we're only interested in things up to f_trace. + +}; + + +// https://github.com/python/cpython/blob/3.10/Include/cpython/frameobject.h +class PyFrameObject310 : public PyFrameObject { +public: + size_t ob_size; /* Number of items in variable part -- i.e.: PyVarObject*/ + + PyFrameObject310 *f_back; /* previous frame, or NULL */ + PyObject *f_code; /* code segment */ + PyObject *f_builtins; /* builtin symbol table (PyDictObject) */ + PyObject *f_globals; /* global symbol table (PyDictObject) */ + PyObject *f_locals; /* local symbol table (any mapping) */ + PyObject **f_valuestack; /* points after the last local */ + PyObject *f_trace; /* Trace function */ + + // It has more things, but we're only interested in things up to f_trace. +}; + +typedef uint16_t _Py_CODEUNIT; + +// https://github.com/python/cpython/blob/3.11/Include/internal/pycore_frame.h +typedef struct _PyInterpreterFrame311 { + /* "Specials" section */ + PyFunctionObject *f_func; /* Strong reference */ + PyObject *f_globals; /* Borrowed reference */ + PyObject *f_builtins; /* Borrowed reference */ + PyObject *f_locals; /* Strong reference, may be NULL */ + void *f_code; /* Strong reference */ + void *frame_obj; /* Strong reference, may be NULL */ + /* Linkage section */ + struct _PyInterpreterFrame311 *previous; + // NOTE: This is not necessarily the last instruction started in the given + // frame. Rather, it is the code unit *prior to* the *next* instruction. For + // example, it may be an inline CACHE entry, an instruction we just jumped + // over, or (in the case of a newly-created frame) a totally invalid value: + _Py_CODEUNIT *prev_instr; + int stacktop; /* Offset of TOS from localsplus */ + bool is_entry; // Whether this is the "root" frame for the current _PyCFrame. 
+ char owner; + /* Locals and stack */ + PyObject *localsplus[1]; +} _PyInterpreterFrame311; + +// https://github.com/python/cpython/blob/3.11/Include/internal/pycore_frame.h +// Note that in 3.11 it's no longer a "PyVarObject". +class PyFrameObject311 : public PyFrameObject { +public: + PyFrameObject311 *f_back; /* previous frame, or NULL */ + struct _PyInterpreterFrame311 *f_frame; /* points to the frame data */ + PyObject *f_trace; /* Trace function */ + int f_lineno; /* Current line number. Only valid if non-zero */ + char f_trace_lines; /* Emit per-line trace events? */ + char f_trace_opcodes; /* Emit per-opcode trace events? */ + char f_fast_as_locals; /* Have the fast locals of this frame been converted to a dict? */ + // It has more things, but we're not interested on those. +}; + + +typedef void (*destructor)(PyObject *); + +// 2.4 - 3.7 +class PyMethodDef { +public: + char *ml_name; /* The name of the built-in function/method */ +}; + + +// +// 2.5 - 3.7 +// While these are compatible there are fields only available on later versions. +class PyTypeObject : public PyVarObject { +public: + const char *tp_name; /* For printing, in format "." */ + size_t tp_basicsize, tp_itemsize; /* For allocation */ + + /* Methods to implement standard operations */ + + destructor tp_dealloc; + void *tp_print; + void *tp_getattr; + void *tp_setattr; + union { + void *tp_compare; /* 2.4 - 3.4 */ + void *tp_as_async; /* 3.5 - 3.7 */ + }; + void *tp_repr; + + /* Method suites for standard classes */ + + void *tp_as_number; + void *tp_as_sequence; + void *tp_as_mapping; + + /* More standard operations (here for binary compatibility) */ + + void *tp_hash; + void *tp_call; + void *tp_str; + void *tp_getattro; + void *tp_setattro; + + /* Functions to access object as input/output buffer */ + void *tp_as_buffer; + + /* Flags to define presence of optional/expanded features */ + long tp_flags; + + const char *tp_doc; /* Documentation string */ + + /* Assigned meaning in release 2.0 */ + /* call function for all accessible objects */ + void *tp_traverse; + + /* delete references to contained objects */ + void *tp_clear; + + /* Assigned meaning in release 2.1 */ + /* rich comparisons */ + void *tp_richcompare; + + /* weak reference enabler */ + size_t tp_weaklistoffset; + + /* Added in release 2.2 */ + /* Iterators */ + void *tp_iter; + void *tp_iternext; + + /* Attribute descriptor and subclassing stuff */ + PyMethodDef *tp_methods; + struct PyMemberDef *tp_members; + struct PyGetSetDef *tp_getset; + struct _typeobject *tp_base; + PyObject *tp_dict; + void *tp_descr_get; + void *tp_descr_set; + size_t tp_dictoffset; + void *tp_init; + void *tp_alloc; + void *tp_new; + void *tp_free; /* Low-level free-memory routine */ + void *tp_is_gc; /* For PyObject_IS_GC */ + PyObject *tp_bases; + PyObject *tp_mro; /* method resolution order */ + PyObject *tp_cache; + PyObject *tp_subclasses; + PyObject *tp_weaklist; + void *tp_del; + + /* Type attribute cache version tag. Added in version 2.6 */ + unsigned int tp_version_tag; +}; + +// 2.4 - 3.7 +class PyTupleObject : public PyVarObject { +public: + PyObject *ob_item[1]; + + /* ob_item contains space for 'ob_size' elements. + * Items must normally not be nullptr, except during construction when + * the tuple is not yet visible outside the function that builds it. 
+ */ +}; + +// 2.4 - 3.7 +class PyCFunctionObject : public PyObject { +public: + PyMethodDef *m_ml; /* Description of the C function to call */ + PyObject *m_self; /* Passed as 'self' arg to the C func, can be nullptr */ + PyObject *m_module; /* The __module__ attribute, can be anything */ +}; + +typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *); + +#define PyTrace_CALL 0 +#define PyTrace_EXCEPTION 1 +#define PyTrace_LINE 2 +#define PyTrace_RETURN 3 +#define PyTrace_C_CALL 4 +#define PyTrace_C_EXCEPTION 5 +#define PyTrace_C_RETURN 6 + +class PyInterpreterState { +}; + +class PyThreadState { }; + +class PyThreadState_25_27 : public PyThreadState { +public: + /* See Python/ceval.c for comments explaining most fields */ + + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObjectBaseUpTo39 *frame; + int recursion_depth; + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + /* tick_counter is incremented whenever the check_interval ticker + * reaches zero. The purpose is to give a useful measure of the number + * of interpreted bytecode instructions in a given thread. This + * extremely lightweight statistic collector may be of interest to + * profilers (like psyco.jit()), although nothing in the core uses it. + */ + int tick_counter; + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_25 && version <= PythonVersion_27; + } +}; + +class PyThreadState_30_33 : public PyThreadState { +public: + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObjectBaseUpTo39 *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + /* tick_counter is incremented whenever the check_interval ticker + * reaches zero. The purpose is to give a useful measure of the number + * of interpreted bytecode instructions in a given thread. This + * extremely lightweight statistic collector may be of interest to + * profilers (like psyco.jit()), although nothing in the core uses it. 
+ */ + int tick_counter; + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 3); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_30 && version <= PythonVersion_33; + } +}; + +class PyThreadState_34_36 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObjectBaseUpTo39 *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + + long thread_id; /* Thread id where this tstate was created */ + /* XXX signal handlers should also be here */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion >= 4 && minorVersion <= 6; + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_34 && version <= PythonVersion_36; + } +}; + +struct _PyErr_StackItem { + PyObject *exc_type, *exc_value, *exc_traceback; + struct _PyErr_StackItem *previous_item; +}; + + +class PyThreadState_37_38 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObjectBaseUpTo39 *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. 
*/ + int stackcheck_counter; + + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + _PyErr_StackItem exc_state; + _PyErr_StackItem *exc_info; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + + unsigned long thread_id; /* Thread id where this tstate was created */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion == 7 || minorVersion == 8); + } + + static bool IsFor(PythonVersion version) { + return version == PythonVersion_37 || version == PythonVersion_38; + } +}; + +// i.e.: https://github.com/python/cpython/blob/master/Include/cpython/pystate.h +class PyThreadState_39 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObjectBaseUpTo39 *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + int stackcheck_counter; + + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + _PyErr_StackItem exc_state; + _PyErr_StackItem *exc_info; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + + unsigned long thread_id; /* Thread id where this tstate was created */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion == 9; + } + + static bool IsFor(PythonVersion version) { + return version == PythonVersion_39; + } +}; + +typedef struct _cframe { + /* This struct will be threaded through the C stack + * allowing fast access to per-thread state that needs + * to be accessed quickly by the interpreter, but can + * be modified outside of the interpreter. + * + * WARNING: This makes data on the C stack accessible from + * heap objects. Care must be taken to maintain stack + * discipline and make sure that instances of this struct cannot + * accessed outside of their lifetime. + */ + int use_tracing; + struct _cframe *previous; +} CFrame; + +// i.e.: https://github.com/python/cpython/blob/master/Include/cpython/pystate.h +class PyThreadState_310 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject310 *frame; + int recursion_depth; + int recursion_headroom; /* Allow 50 more calls to handle any errors. */ + int stackcheck_counter; + + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + + /* Pointer to current CFrame in the C stack frame of the currently, + * or most recently, executing _PyEval_EvalFrameDefault. 
*/ + CFrame *cframe; + + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + _PyErr_StackItem exc_state; + _PyErr_StackItem *exc_info; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + + unsigned long thread_id; /* Thread id where this tstate was created */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion == 10; + } + + static bool IsFor(PythonVersion version) { + return version == PythonVersion_310; + } +}; + +// i.e.: https://github.com/python/cpython/blob/3.11/Include/cpython/pystate.h +class PyThreadState_311 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + int _initialized; + + int _static; + + int recursion_remaining; + int recursion_limit; + int recursion_headroom; /* Allow 50 more calls to handle any errors. */ + + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int tracing_what; + + /* Pointer to current CFrame in the C stack frame of the currently, + * or most recently, executing _PyEval_EvalFrameDefault. */ + CFrame *cframe; + + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + _PyErr_StackItem *exc_info; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + + unsigned long thread_id; /* Thread id where this tstate was created */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion == 11; + } + + static bool IsFor(PythonVersion version) { + return version == PythonVersion_311; + } +}; + +class PyIntObject : public PyObject { +public: + long ob_ival; +}; + +class Py3kLongObject : public PyVarObject { +public: + DWORD ob_digit[1]; +}; + +class PyOldStyleClassObject : public PyObject { +public: + PyObject *cl_bases; /* A tuple of class objects */ + PyObject *cl_dict; /* A dictionary */ + PyObject *cl_name; /* A string */ + /* The following three are functions or nullptr */ + PyObject *cl_getattr; + PyObject *cl_setattr; + PyObject *cl_delattr; +}; + +class PyInstanceObject : public PyObject { +public: + PyOldStyleClassObject *in_class; /* The class object */ + PyObject *in_dict; /* A dictionary */ + PyObject *in_weakreflist; /* List of weak references */ +}; + +#endif diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/ref_utils.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/ref_utils.hpp new file mode 120000 index 0000000..0b79a2c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/common/ref_utils.hpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f3/00/c5/424f57a009cad8474ce09f9112b29f058667f77b86e91c393826cbb00d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/.gitignore b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/.gitignore new 
file mode 120000 index 0000000..6eb16fc --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/.gitignore @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/af/7a/c3/6ad06e99bf5c443871df986c749a7d5113d49a3824ca70105622c8a11e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/__pycache__/lldb_prepare.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/__pycache__/lldb_prepare.cpython-38.pyc new file mode 100644 index 0000000..b5d2595 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/__pycache__/lldb_prepare.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/attach.cpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/attach.cpp new file mode 120000 index 0000000..fbed076 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/attach.cpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c4/6f/0d/98ec1fba15cdf77b29b86fee11f7b4b4e9f7da19e58c2a98e5fd73df9e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_linux.sh b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_linux.sh new file mode 120000 index 0000000..d26a4ea --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_linux.sh @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/61/b0/57/00cc1704413afa387872576a3d5f544a4eab37f7ce98675838966c1c2f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_mac.sh b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_mac.sh new file mode 120000 index 0000000..8e38219 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_mac.sh @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3e/be/bf/65b7efc91c7e92618d79d452234e99b5964f458c0f8f7553851ae894ab \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_manylinux.cmd b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_manylinux.cmd new file mode 120000 index 0000000..7408e0a --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_manylinux.cmd @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5b/dd/c2/e231be7c69f6eeb77561eb379de3f6ba92533bda8074a64f7fc72f4901 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/lldb_prepare.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/lldb_prepare.py new file mode 120000 index 0000000..17344f6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/lldb_prepare.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/79/29/8b/d4a2cb3ab1bcaf8449c9539ddd44948eca6559774a4820a79c6c8655da \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__init__.py new file mode 120000 index 0000000..4c13004 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d9/55/39/c07302d511252c725ae5cc0f568e9b2bcb110e272cf5c14cb6ac779aae \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..8137b21 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/breakpoint.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/breakpoint.cpython-38.pyc new file mode 100644 index 0000000..dceb165 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/breakpoint.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/compat.cpython-38.pyc new file mode 100644 index 0000000..b5a3f55 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/crash.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/crash.cpython-38.pyc new file mode 100644 index 0000000..6b160cb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/crash.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/debug.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/debug.cpython-38.pyc new file mode 100644 index 0000000..5d02377 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/debug.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/disasm.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/disasm.cpython-38.pyc new file mode 100644 index 0000000..2b3f0c0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/disasm.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/event.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/event.cpython-38.pyc new file mode 100644 index 0000000..03f2a4b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/event.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/interactive.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/interactive.cpython-38.pyc new file mode 100644 index 0000000..32e5fec Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/interactive.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/module.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/module.cpython-38.pyc new file mode 100644 index 0000000..1ba7d7d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/module.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/process.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/process.cpython-38.pyc new file mode 100644 index 0000000..eade9b3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/process.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/registry.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/registry.cpython-38.pyc new file mode 100644 index 0000000..27a9780 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/registry.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/search.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/search.cpython-38.pyc new file mode 100644 index 0000000..5856e0e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/search.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/sql.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/sql.cpython-38.pyc new file mode 100644 index 0000000..92d9e7b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/sql.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/system.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/system.cpython-38.pyc new file mode 100644 index 0000000..aaf9b75 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/system.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/textio.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/textio.cpython-38.pyc new file mode 100644 index 0000000..9d92a28 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/textio.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/thread.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/thread.cpython-38.pyc new file mode 100644 index 0000000..392de37 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/thread.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/util.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000..cc671ef Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/window.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/window.cpython-38.pyc new file mode 100644 index 0000000..26bd658 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__pycache__/window.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py new file mode 120000 index 0000000..dd7a86c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/d3/7e/0256014bc4bc642fe5397a902aad2db5d58e2a37e6469abc1127cd9fce \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/compat.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/compat.py new file mode 120000 index 0000000..b8a298f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d0/a1/07/50933d1d53c993311afdfe9c59be8b07b89c92bef13aa2afb8e5638de0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/crash.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/crash.py new file mode 120000 index 
0000000..0f37443 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/crash.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6c/0c/1e/d218cc7a975991f20449f0894b14093a70b10bbca9d38ea777a6c94c8d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/debug.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/debug.py new file mode 120000 index 0000000..56bdee4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/debug.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4c/8e/7d/53221ab01645ea2967f0e331b500a422fe2dea1457a986840fc6d14f38 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/disasm.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/disasm.py new file mode 120000 index 0000000..ebcc310 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/disasm.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7b/8e/6f/89d305915976b399eed98256e22139360d4a9d6a1b3560569db699b244 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/event.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/event.py new file mode 120000 index 0000000..072ce98 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/event.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/43/31/cf/09c1af119d4bacbdf775d55e80cb1be815114605414a2da7576d1a393b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/interactive.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/interactive.py new file mode 100644 index 0000000..b9d842f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/interactive.py @@ -0,0 +1,2242 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Acknowledgements: +# Nicolas Economou, for his command line debugger on which this is inspired. +# http://tinyurl.com/nicolaseconomou + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Interactive debugging console. + +@group Debugging: + ConsoleDebugger + +@group Exceptions: + CmdError +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = [ 'ConsoleDebugger', 'CmdError' ] + +# TODO document this module with docstrings. +# TODO command to set a last error breakpoint. +# TODO command to show available plugins. + +from winappdbg import win32 +from winappdbg import compat +from winappdbg.system import System +from winappdbg.util import PathOperations +from winappdbg.event import EventHandler, NoEvent +from winappdbg.textio import HexInput, HexOutput, HexDump, CrashDump, DebugLog + +import os +import sys +import code +import time +import warnings +import traceback + +# too many variables named "cmd" to have a module by the same name :P +from cmd import Cmd + +# lazy imports +readline = None + +#============================================================================== + + +class DummyEvent (NoEvent): + "Dummy event object used internally by L{ConsoleDebugger}." + + def get_pid(self): + return self._pid + + def get_tid(self): + return self._tid + + def get_process(self): + return self._process + + def get_thread(self): + return self._thread + +#============================================================================== + + +class CmdError (Exception): + """ + Exception raised when a command parsing error occurs. + Used internally by L{ConsoleDebugger}. + """ + +#============================================================================== + + +class ConsoleDebugger (Cmd, EventHandler): + """ + Interactive console debugger. + + @see: L{Debug.interactive} + """ + +#------------------------------------------------------------------------------ +# Class variables + + # Exception to raise when an error occurs executing a command. + command_error_exception = CmdError + + # Milliseconds to wait for debug events in the main loop. + dwMilliseconds = 100 + + # History file name. + history_file = '.winappdbg_history' + + # Confirm before quitting? + confirm_quit = True + + # Valid plugin name characters. + valid_plugin_name_chars = 'ABCDEFGHIJKLMNOPQRSTUVWXY' \ + 'abcdefghijklmnopqrstuvwxy' \ + '012345678' \ + '_' + + # Names of the registers. 
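ConsoleDebugger layers its commands on the standard library's cmd.Cmd, where every do_* method becomes an interactive command and cmdloop() drives the prompt; the class variables above (dwMilliseconds, history_file, confirm_quit) tune that loop. A minimal stand-alone example of the same Cmd pattern, unrelated to winappdbg:

    from cmd import Cmd

    class MiniShell(Cmd):
        prompt = "> "

        def do_greet(self, arg):
            """greet NAME -- print a greeting."""
            print("hello", arg or "world")

        def do_quit(self, arg):
            """quit -- leave the shell."""
            return True      # returning a true value ends cmdloop()

    if __name__ == "__main__":
        MiniShell().cmdloop()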
+ segment_names = ('cs', 'ds', 'es', 'fs', 'gs') + + register_alias_64_to_32 = { + 'eax':'Rax', 'ebx':'Rbx', 'ecx':'Rcx', 'edx':'Rdx', + 'eip':'Rip', 'ebp':'Rbp', 'esp':'Rsp', 'esi':'Rsi', 'edi':'Rdi' + } + register_alias_64_to_16 = { 'ax':'Rax', 'bx':'Rbx', 'cx':'Rcx', 'dx':'Rdx' } + register_alias_64_to_8_low = { 'al':'Rax', 'bl':'Rbx', 'cl':'Rcx', 'dl':'Rdx' } + register_alias_64_to_8_high = { 'ah':'Rax', 'bh':'Rbx', 'ch':'Rcx', 'dh':'Rdx' } + register_alias_32_to_16 = { 'ax':'Eax', 'bx':'Ebx', 'cx':'Ecx', 'dx':'Edx' } + register_alias_32_to_8_low = { 'al':'Eax', 'bl':'Ebx', 'cl':'Ecx', 'dl':'Edx' } + register_alias_32_to_8_high = { 'ah':'Eax', 'bh':'Ebx', 'ch':'Ecx', 'dh':'Edx' } + + register_aliases_full_32 = list(segment_names) + register_aliases_full_32.extend(compat.iterkeys(register_alias_32_to_16)) + register_aliases_full_32.extend(compat.iterkeys(register_alias_32_to_8_low)) + register_aliases_full_32.extend(compat.iterkeys(register_alias_32_to_8_high)) + register_aliases_full_32 = tuple(register_aliases_full_32) + + register_aliases_full_64 = list(segment_names) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_32)) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_16)) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_8_low)) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_8_high)) + register_aliases_full_64 = tuple(register_aliases_full_64) + + # Names of the control flow instructions. + jump_instructions = ( + 'jmp', 'jecxz', 'jcxz', + 'ja', 'jnbe', 'jae', 'jnb', 'jb', 'jnae', 'jbe', 'jna', 'jc', 'je', + 'jz', 'jnc', 'jne', 'jnz', 'jnp', 'jpo', 'jp', 'jpe', 'jg', 'jnle', + 'jge', 'jnl', 'jl', 'jnge', 'jle', 'jng', 'jno', 'jns', 'jo', 'js' + ) + call_instructions = ('call', 'ret', 'retn') + loop_instructions = ('loop', 'loopz', 'loopnz', 'loope', 'loopne') + control_flow_instructions = call_instructions + loop_instructions + \ + jump_instructions + +#------------------------------------------------------------------------------ +# Instance variables + + def __init__(self): + """ + Interactive console debugger. + + @see: L{Debug.interactive} + """ + Cmd.__init__(self) + EventHandler.__init__(self) + + # Quit the debugger when True. + self.debuggerExit = False + + # Full path to the history file. + self.history_file_full_path = None + + # Last executed command. + self.__lastcmd = "" + +#------------------------------------------------------------------------------ +# Debugger + + # Use this Debug object. + def start_using_debugger(self, debug): + + # Clear the previous Debug object. + self.stop_using_debugger() + + # Keep the Debug object. + self.debug = debug + + # Set ourselves as the event handler for the debugger. + self.prevHandler = debug.set_event_handler(self) + + # Stop using the Debug object given by start_using_debugger(). + # Circular references must be removed, or the destructors never get called. + def stop_using_debugger(self): + if hasattr(self, 'debug'): + debug = self.debug + debug.set_event_handler(self.prevHandler) + del self.prevHandler + del self.debug + return debug + return None + + # Destroy the Debug object. 
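start_using_debugger and stop_using_debugger above rely on Debug.set_event_handler returning the handler that was previously installed, so the console can put it back when it detaches. A stand-alone sketch of that swap, with a stub (FakeDebug is illustrative, not part of winappdbg) in place of the real Debug object:

    class FakeDebug:
        """Stub with the one behaviour the console needs: handler swapping."""
        def __init__(self):
            self.handler = None

        def set_event_handler(self, handler):
            previous, self.handler = self.handler, handler
            return previous

    debug = FakeDebug()
    console = object()                       # stands in for a ConsoleDebugger

    prev = debug.set_event_handler(console)  # start_using_debugger keeps `prev`
    assert debug.handler is console
    debug.set_event_handler(prev)            # stop_using_debugger restores it
    assert debug.handler is None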
+ def destroy_debugger(self, autodetach=True): + debug = self.stop_using_debugger() + if debug is not None: + if not autodetach: + debug.kill_all(bIgnoreExceptions=True) + debug.lastEvent = None + debug.stop() + del debug + + @property + def lastEvent(self): + return self.debug.lastEvent + + def set_fake_last_event(self, process): + if self.lastEvent is None: + self.debug.lastEvent = DummyEvent(self.debug) + self.debug.lastEvent._process = process + self.debug.lastEvent._thread = process.get_thread( + process.get_thread_ids()[0]) + self.debug.lastEvent._pid = process.get_pid() + self.debug.lastEvent._tid = self.lastEvent._thread.get_tid() + +#------------------------------------------------------------------------------ +# Input + +# TODO +# * try to guess breakpoints when insufficient data is given +# * child Cmd instances will have to be used for other prompts, for example +# when assembling or editing memory - it may also be a good idea to think +# if it's possible to make the main Cmd instance also a child, instead of +# the debugger itself - probably the same goes for the EventHandler, maybe +# it can be used as a contained object rather than a parent class. + + # Join a token list into an argument string. + def join_tokens(self, token_list): + return self.debug.system.argv_to_cmdline(token_list) + + # Split an argument string into a token list. + def split_tokens(self, arg, min_count=0, max_count=None): + token_list = self.debug.system.cmdline_to_argv(arg) + if len(token_list) < min_count: + raise CmdError("missing parameters.") + if max_count and len(token_list) > max_count: + raise CmdError("too many parameters.") + return token_list + + # Token is a thread ID or name. + def input_thread(self, token): + targets = self.input_thread_list([token]) + if len(targets) == 0: + raise CmdError("missing thread name or ID") + if len(targets) > 1: + msg = "more than one thread with that name:\n" + for tid in targets: + msg += "\t%d\n" % tid + msg = msg[:-len("\n")] + raise CmdError(msg) + return targets[0] + + # Token list is a list of thread IDs or names. + def input_thread_list(self, token_list): + targets = set() + system = self.debug.system + for token in token_list: + try: + tid = self.input_integer(token) + if not system.has_thread(tid): + raise CmdError("thread not found (%d)" % tid) + targets.add(tid) + except ValueError: + found = set() + for process in system.iter_processes(): + found.update(system.find_threads_by_name(token)) + if not found: + raise CmdError("thread not found (%s)" % token) + for thread in found: + targets.add(thread.get_tid()) + targets = list(targets) + targets.sort() + return targets + + # Token is a process ID or name. + def input_process(self, token): + targets = self.input_process_list([token]) + if len(targets) == 0: + raise CmdError("missing process name or ID") + if len(targets) > 1: + msg = "more than one process with that name:\n" + for pid in targets: + msg += "\t%d\n" % pid + msg = msg[:-len("\n")] + raise CmdError(msg) + return targets[0] + + # Token list is a list of process IDs or names. 
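input_thread_list above and input_process_list just below accept a mix of numeric IDs and names: each token is first parsed as an integer, and only when that raises ValueError is it treated as a name to look up. A stripped-down sketch of that dispatch, with a plain dict standing in for the System object (the PIDs and names are made up):

    processes = {1234: "python.exe", 5678: "notepad.exe"}   # pid -> image name

    def resolve_processes(tokens):
        targets = set()
        for token in tokens:
            try:
                pid = int(token, 0)                  # token looks like a number
                if pid not in processes:
                    raise KeyError("process not found (%d)" % pid)
                targets.add(pid)
            except ValueError:                       # not a number: match by name
                found = [p for p, name in processes.items() if name == token]
                if not found:
                    raise KeyError("process not found (%s)" % token)
                targets.update(found)
        return sorted(targets)

    print(resolve_processes(["1234", "notepad.exe"]))        # [1234, 5678]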
+ def input_process_list(self, token_list): + targets = set() + system = self.debug.system + for token in token_list: + try: + pid = self.input_integer(token) + if not system.has_process(pid): + raise CmdError("process not found (%d)" % pid) + targets.add(pid) + except ValueError: + found = system.find_processes_by_filename(token) + if not found: + raise CmdError("process not found (%s)" % token) + for (process, _) in found: + targets.add(process.get_pid()) + targets = list(targets) + targets.sort() + return targets + + # Token is a command line to execute. + def input_command_line(self, command_line): + argv = self.debug.system.cmdline_to_argv(command_line) + if not argv: + raise CmdError("missing command line to execute") + fname = argv[0] + if not os.path.exists(fname): + try: + fname, _ = win32.SearchPath(None, fname, '.exe') + except WindowsError: + raise CmdError("file not found: %s" % fname) + argv[0] = fname + command_line = self.debug.system.argv_to_cmdline(argv) + return command_line + + # Token is an integer. + # Only hexadecimal format is supported. + def input_hexadecimal_integer(self, token): + return int(token, 0x10) + + # Token is an integer. + # It can be in any supported format. + def input_integer(self, token): + return HexInput.integer(token) +# # input_integer = input_hexadecimal_integer + + # Token is an address. + # The address can be a integer, a label or a register. + def input_address(self, token, pid=None, tid=None): + address = None + if self.is_register(token): + if tid is None: + if self.lastEvent is None or pid != self.lastEvent.get_pid(): + msg = "can't resolve register (%s) for unknown thread" + raise CmdError(msg % token) + tid = self.lastEvent.get_tid() + address = self.input_register(token, tid) + if address is None: + try: + address = self.input_hexadecimal_integer(token) + except ValueError: + if pid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + process = self.lastEvent.get_process() + elif self.lastEvent is not None and pid == self.lastEvent.get_pid(): + process = self.lastEvent.get_process() + else: + try: + process = self.debug.system.get_process(pid) + except KeyError: + raise CmdError("process not found (%d)" % pid) + try: + address = process.resolve_label(token) + except Exception: + raise CmdError("unknown address (%s)" % token) + return address + + # Token is an address range, or a single address. + # The addresses can be integers, labels or registers. + def input_address_range(self, token_list, pid=None, tid=None): + if len(token_list) == 2: + token_1, token_2 = token_list + address = self.input_address(token_1, pid, tid) + try: + size = self.input_integer(token_2) + except ValueError: + raise CmdError("bad address range: %s %s" % (token_1, token_2)) + elif len(token_list) == 1: + token = token_list[0] + if '-' in token: + try: + token_1, token_2 = token.split('-') + except Exception: + raise CmdError("bad address range: %s" % token) + address = self.input_address(token_1, pid, tid) + size = self.input_address(token_2, pid, tid) - address + else: + address = self.input_address(token, pid, tid) + size = None + return address, size + + # XXX TODO + # Support non-integer registers here. 
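The register alias tables defined earlier reduce sub-registers such as ax, al and ah to the full context register; is_register and input_register below apply masks and shifts to do it. The arithmetic on its own, with a fake context dictionary:

    ctx = {"Eax": 0x12345678}             # pretend thread context

    ax = ctx["Eax"] & 0xFFFF              # low 16 bits  -> 0x5678
    al = ctx["Eax"] & 0xFF                # low 8 bits   -> 0x78
    ah = (ctx["Eax"] & 0xFF00) >> 8       # bits 8..15   -> 0x56

    print(hex(ax), hex(al), hex(ah))      # 0x5678 0x78 0x56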
+ def is_register(self, token): + if win32.arch == 'i386': + if token in self.register_aliases_full_32: + return True + token = token.title() + for (name, typ) in win32.CONTEXT._fields_: + if name == token: + return win32.sizeof(typ) == win32.sizeof(win32.DWORD) + elif win32.arch == 'amd64': + if token in self.register_aliases_full_64: + return True + token = token.title() + for (name, typ) in win32.CONTEXT._fields_: + if name == token: + return win32.sizeof(typ) == win32.sizeof(win32.DWORD64) + return False + + # The token is a register name. + # Returns None if no register name is matched. + def input_register(self, token, tid=None): + if tid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + else: + thread = self.debug.system.get_thread(tid) + ctx = thread.get_context() + + token = token.lower() + title = token.title() + + if title in ctx: + return ctx.get(title) # eax -> Eax + + if ctx.arch == 'i386': + + if token in self.segment_names: + return ctx.get('Seg%s' % title) # cs -> SegCs + + if token in self.register_alias_32_to_16: + return ctx.get(self.register_alias_32_to_16[token]) & 0xFFFF + + if token in self.register_alias_32_to_8_low: + return ctx.get(self.register_alias_32_to_8_low[token]) & 0xFF + + if token in self.register_alias_32_to_8_high: + return (ctx.get(self.register_alias_32_to_8_high[token]) & 0xFF00) >> 8 + + elif ctx.arch == 'amd64': + + if token in self.segment_names: + return ctx.get('Seg%s' % title) # cs -> SegCs + + if token in self.register_alias_64_to_32: + return ctx.get(self.register_alias_64_to_32[token]) & 0xFFFFFFFF + + if token in self.register_alias_64_to_16: + return ctx.get(self.register_alias_64_to_16[token]) & 0xFFFF + + if token in self.register_alias_64_to_8_low: + return ctx.get(self.register_alias_64_to_8_low[token]) & 0xFF + + if token in self.register_alias_64_to_8_high: + return (ctx.get(self.register_alias_64_to_8_high[token]) & 0xFF00) >> 8 + + return None + + # Token list contains an address or address range. + # The prefix is also parsed looking for process and thread IDs. + def input_full_address_range(self, token_list): + pid, tid = self.get_process_and_thread_ids_from_prefix() + address, size = self.input_address_range(token_list, pid, tid) + return pid, tid, address, size + + # Token list contains a breakpoint. + def input_breakpoint(self, token_list): + pid, tid, address, size = self.input_full_address_range(token_list) + if not self.debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + return pid, tid, address, size + + # Token list contains a memory address, and optional size and process. + # Sets the results as the default for the next display command. + def input_display(self, token_list, default_size=64): + pid, tid, address, size = self.input_full_address_range(token_list) + if not size: + size = default_size + next_address = HexOutput.integer(address + size) + self.default_display_target = next_address + return pid, tid, address, size + +#------------------------------------------------------------------------------ +# Output + + # Tell the user a module was loaded. + def print_module_load(self, event): + mod = event.get_module() + base = mod.get_base() + name = mod.get_filename() + if not name: + name = '' + msg = "Loaded module (%s) %s" + msg = msg % (HexDump.address(base), name) + print(msg) + + # Tell the user a module was unloaded. 
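print_module_load above renders the module base through HexDump.address before printing; roughly the same output with plain formatting for a 32-bit address looks like this (the real code lets HexDump pick the width for the target architecture):

    base = 0x77550000
    name = r"C:\Windows\System32\kernel32.dll"
    print("Loaded module (%08x) %s" % (base, name))
    # Loaded module (77550000) C:\Windows\System32\kernel32.dll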
+ def print_module_unload(self, event): + mod = event.get_module() + base = mod.get_base() + name = mod.get_filename() + if not name: + name = '' + msg = "Unloaded module (%s) %s" + msg = msg % (HexDump.address(base), name) + print(msg) + + # Tell the user a process was started. + def print_process_start(self, event): + pid = event.get_pid() + start = event.get_start_address() + if start: + start = HexOutput.address(start) + print("Started process %d at %s" % (pid, start)) + else: + print("Attached to process %d" % pid) + + # Tell the user a thread was started. + def print_thread_start(self, event): + tid = event.get_tid() + start = event.get_start_address() + if start: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + start = event.get_process().get_label_at_address(start) + print("Started thread %d at %s" % (tid, start)) + else: + print("Attached to thread %d" % tid) + + # Tell the user a process has finished. + def print_process_end(self, event): + pid = event.get_pid() + code = event.get_exit_code() + print("Process %d terminated, exit code %d" % (pid, code)) + + # Tell the user a thread has finished. + def print_thread_end(self, event): + tid = event.get_tid() + code = event.get_exit_code() + print("Thread %d terminated, exit code %d" % (tid, code)) + + # Print(debug strings. + def print_debug_string(self, event): + tid = event.get_tid() + string = event.get_debug_string() + print("Thread %d says: %r" % (tid, string)) + + # Inform the user of any other debugging event. + def print_event(self, event): + code = HexDump.integer(event.get_event_code()) + name = event.get_event_name() + desc = event.get_event_description() + if code in desc: + print('') + print("%s: %s" % (name, desc)) + else: + print('') + print("%s (%s): %s" % (name, code, desc)) + self.print_event_location(event) + + # Stop on exceptions and prompt for commands. + def print_exception(self, event): + address = HexDump.address(event.get_exception_address()) + code = HexDump.integer(event.get_exception_code()) + desc = event.get_exception_description() + if event.is_first_chance(): + chance = 'first' + else: + chance = 'second' + if code in desc: + msg = "%s at address %s (%s chance)" % (desc, address, chance) + else: + msg = "%s (%s) at address %s (%s chance)" % (desc, code, address, chance) + print('') + print(msg) + self.print_event_location(event) + + # Show the current location in the code. + def print_event_location(self, event): + process = event.get_process() + thread = event.get_thread() + self.print_current_location(process, thread) + + # Show the current location in the code. + def print_breakpoint_location(self, event): + process = event.get_process() + thread = event.get_thread() + pc = event.get_exception_address() + self.print_current_location(process, thread, pc) + + # Show the current location in any process and thread. 
+ def print_current_location(self, process=None, thread=None, pc=None): + if not process: + if self.lastEvent is None: + raise CmdError("no current process set") + process = self.lastEvent.get_process() + if not thread: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + thread.suspend() + try: + if pc is None: + pc = thread.get_pc() + ctx = thread.get_context() + finally: + thread.resume() + label = process.get_label_at_address(pc) + try: + disasm = process.disassemble(pc, 15) + except WindowsError: + disasm = None + except NotImplementedError: + disasm = None + print('') + print(CrashDump.dump_registers(ctx),) + print("%s:" % label) + if disasm: + print(CrashDump.dump_code_line(disasm[0], pc, bShowDump=True)) + else: + try: + data = process.peek(pc, 15) + except Exception: + data = None + if data: + print('%s: %s' % (HexDump.address(pc), HexDump.hexblock_byte(data))) + else: + print('%s: ???' % HexDump.address(pc)) + + # Display memory contents using a given method. + def print_memory_display(self, arg, method): + if not arg: + arg = self.default_display_target + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_display(token_list) + label = self.get_process(pid).get_label_at_address(address) + data = self.read_memory(address, size, pid) + if data: + print("%s:" % label) + print(method(data, address),) + +#------------------------------------------------------------------------------ +# Debugging + + # Get the process ID from the prefix or the last event. + def get_process_id_from_prefix(self): + if self.cmdprefix: + pid = self.input_process(self.cmdprefix) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + pid = self.lastEvent.get_pid() + return pid + + # Get the thread ID from the prefix or the last event. + def get_thread_id_from_prefix(self): + if self.cmdprefix: + tid = self.input_thread(self.cmdprefix) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + tid = self.lastEvent.get_tid() + return tid + + # Get the process from the prefix or the last event. + def get_process_from_prefix(self): + pid = self.get_process_id_from_prefix() + return self.get_process(pid) + + # Get the thread from the prefix or the last event. + def get_thread_from_prefix(self): + tid = self.get_thread_id_from_prefix() + return self.get_thread(tid) + + # Get the process and thread IDs from the prefix or the last event. + def get_process_and_thread_ids_from_prefix(self): + if self.cmdprefix: + try: + pid = self.input_process(self.cmdprefix) + tid = None + except CmdError: + try: + tid = self.input_thread(self.cmdprefix) + pid = self.debug.system.get_thread(tid).get_pid() + except CmdError: + msg = "unknown process or thread (%s)" % self.cmdprefix + raise CmdError(msg) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + pid = self.lastEvent.get_pid() + tid = self.lastEvent.get_tid() + return pid, tid + + # Get the process and thread from the prefix or the last event. + def get_process_and_thread_from_prefix(self): + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + thread = self.get_thread(tid) + return process, thread + + # Get the process object. 
+ def get_process(self, pid=None): + if pid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + process = self.lastEvent.get_process() + elif self.lastEvent is not None and pid == self.lastEvent.get_pid(): + process = self.lastEvent.get_process() + else: + try: + process = self.debug.system.get_process(pid) + except KeyError: + raise CmdError("process not found (%d)" % pid) + return process + + # Get the thread object. + def get_thread(self, tid=None): + if tid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + elif self.lastEvent is not None and tid == self.lastEvent.get_tid(): + thread = self.lastEvent.get_thread() + else: + try: + thread = self.debug.system.get_thread(tid) + except KeyError: + raise CmdError("thread not found (%d)" % tid) + return thread + + # Read the process memory. + def read_memory(self, address, size, pid=None): + process = self.get_process(pid) + try: + data = process.peek(address, size) + except WindowsError: + orig_address = HexOutput.integer(address) + next_address = HexOutput.integer(address + size) + msg = "error reading process %d, from %s to %s (%d bytes)" + msg = msg % (pid, orig_address, next_address, size) + raise CmdError(msg) + return data + + # Write the process memory. + def write_memory(self, address, data, pid=None): + process = self.get_process(pid) + try: + process.write(address, data) + except WindowsError: + size = len(data) + orig_address = HexOutput.integer(address) + next_address = HexOutput.integer(address + size) + msg = "error reading process %d, from %s to %s (%d bytes)" + msg = msg % (pid, orig_address, next_address, size) + raise CmdError(msg) + + # Change a register value. + def change_register(self, register, value, tid=None): + + # Get the thread. + if tid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + else: + try: + thread = self.debug.system.get_thread(tid) + except KeyError: + raise CmdError("thread not found (%d)" % tid) + + # Convert the value to integer type. + try: + value = self.input_integer(value) + except ValueError: + pid = thread.get_pid() + value = self.input_address(value, pid, tid) + + # Suspend the thread. + # The finally clause ensures the thread is resumed before returning. + thread.suspend() + try: + + # Get the current context. + ctx = thread.get_context() + + # Register name matching is case insensitive. + register = register.lower() + + # Integer 32 bits registers. + if register in self.register_names: + register = register.title() # eax -> Eax + + # Segment (16 bit) registers. + if register in self.segment_names: + register = 'Seg%s' % register.title() # cs -> SegCs + value = value & 0x0000FFFF + + # Integer 16 bits registers. + if register in self.register_alias_16: + register = self.register_alias_16[register] + previous = ctx.get(register) & 0xFFFF0000 + value = (value & 0x0000FFFF) | previous + + # Integer 8 bits registers (low part). + if register in self.register_alias_8_low: + register = self.register_alias_8_low[register] + previous = ctx.get(register) % 0xFFFFFF00 + value = (value & 0x000000FF) | previous + + # Integer 8 bits registers (high part). + if register in self.register_alias_8_high: + register = self.register_alias_8_high[register] + previous = ctx.get(register) % 0xFFFF00FF + value = ((value & 0x000000FF) << 8) | previous + + # Set the new context. 
+ ctx.__setitem__(register, value) + thread.set_context(ctx) + + # Resume the thread. + finally: + thread.resume() + + # Very crude way to find data within the process memory. + # TODO: Perhaps pfind.py can be integrated here instead. + def find_in_memory(self, query, process): + for mbi in process.get_memory_map(): + if mbi.State != win32.MEM_COMMIT or mbi.Protect & win32.PAGE_GUARD: + continue + address = mbi.BaseAddress + size = mbi.RegionSize + try: + data = process.read(address, size) + except WindowsError: + msg = "*** Warning: read error at address %s" + msg = msg % HexDump.address(address) + print(msg) + width = min(len(query), 16) + p = data.find(query) + while p >= 0: + q = p + len(query) + d = data[ p: min(q, p + width) ] + h = HexDump.hexline(d, width=width) + a = HexDump.address(address + p) + print("%s: %s" % (a, h)) + p = data.find(query, q) + + # Kill a process. + def kill_process(self, pid): + process = self.debug.system.get_process(pid) + try: + process.kill() + if self.debug.is_debugee(pid): + self.debug.detach(pid) + print("Killed process (%d)" % pid) + except Exception: + print("Error trying to kill process (%d)" % pid) + + # Kill a thread. + def kill_thread(self, tid): + thread = self.debug.system.get_thread(tid) + try: + thread.kill() + process = thread.get_process() + pid = process.get_pid() + if self.debug.is_debugee(pid) and not process.is_alive(): + self.debug.detach(pid) + print("Killed thread (%d)" % tid) + except Exception: + print("Error trying to kill thread (%d)" % tid) + +#------------------------------------------------------------------------------ +# Command prompt input + + # Prompt the user for commands. + def prompt_user(self): + while not self.debuggerExit: + try: + self.cmdloop() + break + except CmdError: + e = sys.exc_info()[1] + print("*** Error: %s" % str(e)) + except Exception: + traceback.print_exc() +# # self.debuggerExit = True + + # Prompt the user for a YES/NO kind of question. + def ask_user(self, msg, prompt="Are you sure? (y/N): "): + print(msg) + answer = raw_input(prompt) + answer = answer.strip()[:1].lower() + return answer == 'y' + + # Autocomplete the given command when not ambiguous. + # Convert it to lowercase (so commands are seen as case insensitive). + def autocomplete(self, cmd): + cmd = cmd.lower() + completed = self.completenames(cmd) + if len(completed) == 1: + cmd = completed[0] + return cmd + + # Get the help text for the given list of command methods. + # Note it's NOT a list of commands, but a list of actual method names. + # Each line of text is stripped and all lines are sorted. + # Repeated text lines are removed. + # Returns a single, possibly multiline, string. + def get_help(self, commands): + msg = set() + for name in commands: + if name != 'do_help': + try: + doc = getattr(self, name).__doc__.split('\n') + except Exception: + return ("No help available when Python" + " is run with the -OO switch.") + for x in doc: + x = x.strip() + if x: + msg.add(' %s' % x) + msg = list(msg) + msg.sort() + msg = '\n'.join(msg) + return msg + + # Parse the prefix and remove it from the command line. + def split_prefix(self, line): + prefix = None + if line.startswith('~'): + pos = line.find(' ') + if pos == 1: + pos = line.find(' ', pos + 1) + if not pos < 0: + prefix = line[ 1: pos ].strip() + line = line[ pos: ].strip() + return prefix, line + +#------------------------------------------------------------------------------ +# Cmd() hacks + + # Header for help page. 
+ doc_header = 'Available commands (type help * or help )' + +# # # Read and write directly to stdin and stdout. +# # # This prevents the use of raw_input and print. +# # use_rawinput = False + + @property + def prompt(self): + if self.lastEvent: + pid = self.lastEvent.get_pid() + tid = self.lastEvent.get_tid() + if self.debug.is_debugee(pid): +# # return '~%d(%d)> ' % (tid, pid) + return '%d:%d> ' % (pid, tid) + return '> ' + + # Return a sorted list of method names. + # Only returns the methods that implement commands. + def get_names(self): + names = Cmd.get_names(self) + names = [ x for x in set(names) if x.startswith('do_') ] + names.sort() + return names + + # Automatically autocomplete commands, even if Tab wasn't pressed. + # The prefix is removed from the line and stored in self.cmdprefix. + # Also implement the commands that consist of a symbol character. + def parseline(self, line): + self.cmdprefix, line = self.split_prefix(line) + line = line.strip() + if line: + if line[0] == '.': + line = 'plugin ' + line[1:] + elif line[0] == '#': + line = 'python ' + line[1:] + cmd, arg, line = Cmd.parseline(self, line) + if cmd: + cmd = self.autocomplete(cmd) + return cmd, arg, line + +# # # Don't repeat the last executed command. +# # def emptyline(self): +# # pass + + # Reset the defaults for some commands. + def preloop(self): + self.default_disasm_target = 'eip' + self.default_display_target = 'eip' + self.last_display_command = self.do_db + + # Put the prefix back in the command line. + def get_lastcmd(self): + return self.__lastcmd + + def set_lastcmd(self, lastcmd): + if self.cmdprefix: + lastcmd = '~%s %s' % (self.cmdprefix, lastcmd) + self.__lastcmd = lastcmd + + lastcmd = property(get_lastcmd, set_lastcmd) + + # Quit the command prompt if the debuggerExit flag is on. + def postcmd(self, stop, line): + return stop or self.debuggerExit + +#------------------------------------------------------------------------------ +# Commands + + # Each command contains a docstring with it's help text. + # The help text consist of independent text lines, + # where each line shows a command and it's parameters. + # Each command method has the help message for itself and all it's aliases. + # Only the docstring for the "help" command is shown as-is. + + # NOTE: Command methods MUST be all lowercase! + + # Extended help command. + def do_help(self, arg): + """ + ? - show the list of available commands + ? * - show help for all commands + ? [command...] - show help for the given command(s) + help - show the list of available commands + help * - show help for all commands + help [command...] - show help for the given command(s) + """ + if not arg: + Cmd.do_help(self, arg) + elif arg in ('?', 'help'): + # An easter egg :) + print(" Help! I need somebody...") + print(" Help! Not just anybody...") + print(" Help! You know, I need someone...") + print(" Heeelp!") + else: + if arg == '*': + commands = self.get_names() + commands = [ x for x in commands if x.startswith('do_') ] + else: + commands = set() + for x in arg.split(' '): + x = x.strip() + if x: + for n in self.completenames(x): + commands.add('do_%s' % n) + commands = list(commands) + commands.sort() + print(self.get_help(commands)) + + def do_shell(self, arg): + """ + ! - spawn a system shell + shell - spawn a system shell + ! [arguments...] - execute a single shell command + shell [arguments...] - execute a single shell command + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + + # Try to use the environment to locate cmd.exe. 
+ # If not found, it's usually OK to just use the filename, + # since cmd.exe is one of those "magic" programs that + # can be automatically found by CreateProcess. + shell = os.getenv('ComSpec', 'cmd.exe') + + # When given a command, run it and return. + # When no command is given, spawn a shell. + if arg: + arg = '%s /c %s' % (shell, arg) + else: + arg = shell + process = self.debug.system.start_process(arg, bConsole=True) + process.wait() + + # This hack fixes a bug in Python, the interpreter console is closing the + # stdin pipe when calling the exit() function (Ctrl+Z seems to work fine). + class _PythonExit(object): + + def __repr__(self): + return "Use exit() or Ctrl-Z plus Return to exit" + + def __call__(self): + raise SystemExit() + + _python_exit = _PythonExit() + + # Spawns a Python shell with some handy local variables and the winappdbg + # module already imported. Also the console banner is improved. + def _spawn_python_shell(self, arg): + import winappdbg + banner = ('Python %s on %s\nType "help", "copyright", ' + '"credits" or "license" for more information.\n') + platform = winappdbg.version.lower() + platform = 'WinAppDbg %s' % platform + banner = banner % (sys.version, platform) + local = {} + local.update(__builtins__) + local.update({ + '__name__': '__console__', + '__doc__': None, + 'exit': self._python_exit, + 'self': self, + 'arg': arg, + 'winappdbg': winappdbg, + }) + try: + code.interact(banner=banner, local=local) + except SystemExit: + # We need to catch it so it doesn't kill our program. + pass + + def do_python(self, arg): + """ + # - spawn a python interpreter + python - spawn a python interpreter + # - execute a single python statement + python - execute a single python statement + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + + # When given a Python statement, execute it directly. + if arg: + try: + compat.exec_(arg, globals(), locals()) + except Exception: + traceback.print_exc() + + # When no statement is given, spawn a Python interpreter. + else: + try: + self._spawn_python_shell(arg) + except Exception: + e = sys.exc_info()[1] + raise CmdError( + "unhandled exception when running Python console: %s" % e) + + def do_quit(self, arg): + """ + quit - close the debugging session + q - close the debugging session + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.confirm_quit: + count = self.debug.get_debugee_count() + if count > 0: + if count == 1: + msg = "There's a program still running." + else: + msg = "There are %s programs still running." % count + if not self.ask_user(msg): + return False + self.debuggerExit = True + return True + + do_q = do_quit + + def do_attach(self, arg): + """ + attach [target...] - attach to the given process(es) + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + targets = self.input_process_list(self.split_tokens(arg, 1)) + if not targets: + print("Error: missing parameters") + else: + debug = self.debug + for pid in targets: + try: + debug.attach(pid) + print("Attached to process (%d)" % pid) + except Exception: + print("Error: can't attach to process (%d)" % pid) + + def do_detach(self, arg): + """ + [~process] detach - detach from the current process + detach - detach from the current process + detach [target...] 
- detach from the given process(es) + """ + debug = self.debug + token_list = self.split_tokens(arg) + if self.cmdprefix: + token_list.insert(0, self.cmdprefix) + targets = self.input_process_list(token_list) + if not targets: + if self.lastEvent is None: + raise CmdError("no current process set") + targets = [ self.lastEvent.get_pid() ] + for pid in targets: + try: + debug.detach(pid) + print("Detached from process (%d)" % pid) + except Exception: + print("Error: can't detach from process (%d)" % pid) + + def do_windowed(self, arg): + """ + windowed [arguments...] - run a windowed program for debugging + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + cmdline = self.input_command_line(arg) + try: + process = self.debug.execl(arg, + bConsole=False, + bFollow=self.options.follow) + print("Spawned process (%d)" % process.get_pid()) + except Exception: + raise CmdError("can't execute") + self.set_fake_last_event(process) + + def do_console(self, arg): + """ + console [arguments...] - run a console program for debugging + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + cmdline = self.input_command_line(arg) + try: + process = self.debug.execl(arg, + bConsole=True, + bFollow=self.options.follow) + print("Spawned process (%d)" % process.get_pid()) + except Exception: + raise CmdError("can't execute") + self.set_fake_last_event(process) + + def do_continue(self, arg): + """ + continue - continue execution + g - continue execution + go - continue execution + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.debug.get_debugee_count() > 0: + return True + + do_g = do_continue + do_go = do_continue + + def do_gh(self, arg): + """ + gh - go with exception handled + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.lastEvent: + self.lastEvent.continueStatus = win32.DBG_EXCEPTION_HANDLED + return self.do_go(arg) + + def do_gn(self, arg): + """ + gn - go with exception not handled + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.lastEvent: + self.lastEvent.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + return self.do_go(arg) + + def do_refresh(self, arg): + """ + refresh - refresh the list of running processes and threads + [~process] refresh - refresh the list of running threads + """ + if arg: + raise CmdError("too many arguments") + if self.cmdprefix: + process = self.get_process_from_prefix() + process.scan() + else: + self.debug.system.scan() + + def do_processlist(self, arg): + """ + pl - show the processes being debugged + processlist - show the processes being debugged + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + system = self.debug.system + pid_list = self.debug.get_debugee_pids() + if pid_list: + print("Process ID File name") + for pid in pid_list: + if pid == 0: + filename = "System Idle Process" + elif pid == 4: + filename = "System" + else: + filename = system.get_process(pid).get_filename() + filename = PathOperations.pathname_to_filename(filename) + print("%-12d %s" % (pid, filename)) + + do_pl = do_processlist + + def do_threadlist(self, arg): + """ + tl - show the threads being debugged + threadlist - show the threads being debugged + """ + if arg: + raise CmdError("too many arguments") + if self.cmdprefix: + process = self.get_process_from_prefix() + for 
thread in process.iter_threads(): + tid = thread.get_tid() + name = thread.get_name() + print("%-12d %s" % (tid, name)) + else: + system = self.debug.system + pid_list = self.debug.get_debugee_pids() + if pid_list: + print("Thread ID Thread name") + for pid in pid_list: + process = system.get_process(pid) + for thread in process.iter_threads(): + tid = thread.get_tid() + name = thread.get_name() + print("%-12d %s" % (tid, name)) + + do_tl = do_threadlist + + def do_kill(self, arg): + """ + [~process] kill - kill a process + [~thread] kill - kill a thread + kill - kill the current process + kill * - kill all debugged processes + kill - kill the given processes and threads + """ + if arg: + if arg == '*': + target_pids = self.debug.get_debugee_pids() + target_tids = list() + else: + target_pids = set() + target_tids = set() + if self.cmdprefix: + pid, tid = self.get_process_and_thread_ids_from_prefix() + if tid is None: + target_tids.add(tid) + else: + target_pids.add(pid) + for token in self.split_tokens(arg): + try: + pid = self.input_process(token) + target_pids.add(pid) + except CmdError: + try: + tid = self.input_process(token) + target_pids.add(pid) + except CmdError: + msg = "unknown process or thread (%s)" % token + raise CmdError(msg) + target_pids = list(target_pids) + target_tids = list(target_tids) + target_pids.sort() + target_tids.sort() + msg = "You are about to kill %d processes and %d threads." + msg = msg % (len(target_pids), len(target_tids)) + if self.ask_user(msg): + for pid in target_pids: + self.kill_process(pid) + for tid in target_tids: + self.kill_thread(tid) + else: + if self.cmdprefix: + pid, tid = self.get_process_and_thread_ids_from_prefix() + if tid is None: + if self.lastEvent is not None and pid == self.lastEvent.get_pid(): + msg = "You are about to kill the current process." + else: + msg = "You are about to kill process %d." % pid + if self.ask_user(msg): + self.kill_process(pid) + else: + if self.lastEvent is not None and tid == self.lastEvent.get_tid(): + msg = "You are about to kill the current thread." + else: + msg = "You are about to kill thread %d." % tid + if self.ask_user(msg): + self.kill_thread(tid) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + pid = self.lastEvent.get_pid() + if self.ask_user("You are about to kill the current process."): + self.kill_process(pid) + + # TODO: create hidden threads using undocumented API calls. + def do_modload(self, arg): + """ + [~process] modload - load a DLL module + """ + filename = self.split_tokens(arg, 1, 1)[0] + process = self.get_process_from_prefix() + try: + process.inject_dll(filename, bWait=False) + except RuntimeError: + print("Can't inject module: %r" % filename) + + # TODO: modunload + + def do_stack(self, arg): + """ + [~thread] k - show the stack trace + [~thread] stack - show the stack trace + """ + if arg: # XXX TODO add depth parameter + raise CmdError("too many arguments") + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + thread = process.get_thread(tid) + try: + stack_trace = thread.get_stack_trace_with_labels() + if stack_trace: + print(CrashDump.dump_stack_trace_with_labels(stack_trace),) + else: + print("No stack trace available for thread (%d)" % tid) + except WindowsError: + print("Can't get stack trace for thread (%d)" % tid) + + do_k = do_stack + + def do_break(self, arg): + """ + break - force a debug break in all debugees + break [process...] 
- force a debug break + """ + debug = self.debug + system = debug.system + targets = self.input_process_list(self.split_tokens(arg)) + if not targets: + targets = debug.get_debugee_pids() + targets.sort() + if self.lastEvent: + current = self.lastEvent.get_pid() + else: + current = None + for pid in targets: + if pid != current and debug.is_debugee(pid): + process = system.get_process(pid) + try: + process.debug_break() + except WindowsError: + print("Can't force a debug break on process (%d)") + + def do_step(self, arg): + """ + p - step on the current assembly instruction + next - step on the current assembly instruction + step - step on the current assembly instruction + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if self.lastEvent is None: + raise CmdError("no current process set") + if arg: # XXX this check is to be removed + raise CmdError("too many arguments") + pid = self.lastEvent.get_pid() + thread = self.lastEvent.get_thread() + pc = thread.get_pc() + code = thread.disassemble(pc, 16)[0] + size = code[1] + opcode = code[2].lower() + if ' ' in opcode: + opcode = opcode[: opcode.find(' ') ] + if opcode in self.jump_instructions or opcode in ('int', 'ret', 'retn'): + return self.do_trace(arg) + address = pc + size +# # print(hex(pc), hex(address), size # XXX DEBUG + self.debug.stalk_at(pid, address) + return True + + do_p = do_step + do_next = do_step + + def do_trace(self, arg): + """ + t - trace at the current assembly instruction + trace - trace at the current assembly instruction + """ + if arg: # XXX this check is to be removed + raise CmdError("too many arguments") + if self.lastEvent is None: + raise CmdError("no current thread set") + self.lastEvent.get_thread().set_tf() + return True + + do_t = do_trace + + def do_bp(self, arg): + """ + [~process] bp
<address> - set a code breakpoint + """ + pid = self.get_process_id_from_prefix() + if not self.debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + process = self.get_process(pid) + token_list = self.split_tokens(arg, 1, 1) + try: + address = self.input_address(token_list[0], pid) + deferred = False + except Exception: + address = token_list[0] + deferred = True + if not address: + address = token_list[0] + deferred = True + self.debug.break_at(pid, address) + if deferred: + print("Deferred breakpoint set at %s" % address) + else: + print("Breakpoint set at %s" % address) + + def do_ba(self, arg): + """ + [~thread] ba <a|w|e> <1|2|4|8> <address>
    - set hardware breakpoint + """ + debug = self.debug + thread = self.get_thread_from_prefix() + pid = thread.get_pid() + tid = thread.get_tid() + if not debug.is_debugee(pid): + raise CmdError("target thread is not being debugged") + token_list = self.split_tokens(arg, 3, 3) + access = token_list[0].lower() + size = token_list[1] + address = token_list[2] + if access == 'a': + access = debug.BP_BREAK_ON_ACCESS + elif access == 'w': + access = debug.BP_BREAK_ON_WRITE + elif access == 'e': + access = debug.BP_BREAK_ON_EXECUTION + else: + raise CmdError("bad access type: %s" % token_list[0]) + if size == '1': + size = debug.BP_WATCH_BYTE + elif size == '2': + size = debug.BP_WATCH_WORD + elif size == '4': + size = debug.BP_WATCH_DWORD + elif size == '8': + size = debug.BP_WATCH_QWORD + else: + raise CmdError("bad breakpoint size: %s" % size) + thread = self.get_thread_from_prefix() + tid = thread.get_tid() + pid = thread.get_pid() + if not debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + address = self.input_address(address, pid) + if debug.has_hardware_breakpoint(tid, address): + debug.erase_hardware_breakpoint(tid, address) + debug.define_hardware_breakpoint(tid, address, access, size) + debug.enable_hardware_breakpoint(tid, address) + + def do_bm(self, arg): + """ + [~process] bm - set memory breakpoint + """ + pid = self.get_process_id_from_prefix() + if not self.debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + process = self.get_process(pid) + token_list = self.split_tokens(arg, 1, 2) + address, size = self.input_address_range(token_list[0], pid) + self.debug.watch_buffer(pid, address, size) + + def do_bl(self, arg): + """ + bl - list the breakpoints for the current process + bl * - list the breakpoints for all processes + [~process] bl - list the breakpoints for the given process + bl [process...] 
- list the breakpoints for each given process + """ + debug = self.debug + if arg == '*': + if self.cmdprefix: + raise CmdError("prefix not supported") + breakpoints = debug.get_debugee_pids() + else: + targets = self.input_process_list(self.split_tokens(arg)) + if self.cmdprefix: + targets.insert(0, self.input_process(self.cmdprefix)) + if not targets: + if self.lastEvent is None: + raise CmdError("no current process is set") + targets = [ self.lastEvent.get_pid() ] + for pid in targets: + bplist = debug.get_process_code_breakpoints(pid) + printed_process_banner = False + if bplist: + if not printed_process_banner: + print("Process %d:" % pid) + printed_process_banner = True + for bp in bplist: + address = repr(bp)[1:-1].replace('remote address ', '') + print(" %s" % address) + dbplist = debug.get_process_deferred_code_breakpoints(pid) + if dbplist: + if not printed_process_banner: + print("Process %d:" % pid) + printed_process_banner = True + for (label, action, oneshot) in dbplist: + if oneshot: + address = " Deferred unconditional one-shot" \ + " code breakpoint at %s" + else: + address = " Deferred unconditional" \ + " code breakpoint at %s" + address = address % label + print(" %s" % address) + bplist = debug.get_process_page_breakpoints(pid) + if bplist: + if not printed_process_banner: + print("Process %d:" % pid) + printed_process_banner = True + for bp in bplist: + address = repr(bp)[1:-1].replace('remote address ', '') + print(" %s" % address) + for tid in debug.system.get_process(pid).iter_thread_ids(): + bplist = debug.get_thread_hardware_breakpoints(tid) + if bplist: + print("Thread %d:" % tid) + for bp in bplist: + address = repr(bp)[1:-1].replace('remote address ', '') + print(" %s" % address) + + def do_bo(self, arg): + """ + [~process] bo
<address> - make a code breakpoint one-shot + [~thread] bo
<address> - make a hardware breakpoint one-shot + [~process] bo <address-address> - make a memory breakpoint one-shot + [~process] bo <address> <size>
    - make a memory breakpoint one-shot + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.enable_one_shot_hardware_breakpoint(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.enable_one_shot_code_breakpoint(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.enable_one_shot_page_breakpoint(pid, address) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_be(self, arg): + """ + [~process] be
<address> - enable a code breakpoint + [~thread] be
<address> - enable a hardware breakpoint + [~process] be <address-address> - enable a memory breakpoint + [~process] be <address> <size>
    - enable a memory breakpoint + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.enable_hardware_breakpoint(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.enable_code_breakpoint(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.enable_page_breakpoint(pid, address) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_bd(self, arg): + """ + [~process] bd
<address> - disable a code breakpoint + [~thread] bd
<address> - disable a hardware breakpoint + [~process] bd <address-address> - disable a memory breakpoint + [~process] bd <address> <size>
    - disable a memory breakpoint + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.disable_hardware_breakpoint(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.disable_code_breakpoint(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.disable_page_breakpoint(pid, address) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_bc(self, arg): + """ + [~process] bc
<address> - clear a code breakpoint + [~thread] bc
<address> - clear a hardware breakpoint + [~process] bc <address-address> - clear a memory breakpoint + [~process] bc <address> <size>
    - clear a memory breakpoint + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.dont_watch_variable(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.dont_break_at(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.dont_watch_buffer(pid, address, size) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_disassemble(self, arg): + """ + [~thread] u [register] - show code disassembly + [~process] u [address] - show code disassembly + [~thread] disassemble [register] - show code disassembly + [~process] disassemble [address] - show code disassembly + """ + if not arg: + arg = self.default_disasm_target + token_list = self.split_tokens(arg, 1, 1) + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + address = self.input_address(token_list[0], pid, tid) + try: + code = process.disassemble(address, 15 * 8)[:8] + except Exception: + msg = "can't disassemble address %s" + msg = msg % HexDump.address(address) + raise CmdError(msg) + if code: + label = process.get_label_at_address(address) + last_code = code[-1] + next_address = last_code[0] + last_code[1] + next_address = HexOutput.integer(next_address) + self.default_disasm_target = next_address + print("%s:" % label) +# # print(CrashDump.dump_code(code)) + for line in code: + print(CrashDump.dump_code_line(line, bShowDump=False)) + + do_u = do_disassemble + + def do_search(self, arg): + """ + [~process] s [address-address] + [~process] search [address-address] + """ + token_list = self.split_tokens(arg, 1, 3) + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + if len(token_list) == 1: + pattern = token_list[0] + minAddr = None + maxAddr = None + else: + pattern = token_list[-1] + addr, size = self.input_address_range(token_list[:-1], pid, tid) + minAddr = addr + maxAddr = addr + size + iter = process.search_bytes(pattern) + if process.get_bits() == 32: + addr_width = 8 + else: + addr_width = 16 + # TODO: need a prettier output here! 
+ for addr in iter: + print(HexDump.address(addr, addr_width)) + + do_s = do_search + + def do_searchhex(self, arg): + """ + [~process] sh [address-address] + [~process] searchhex [address-address] + """ + token_list = self.split_tokens(arg, 1, 3) + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + if len(token_list) == 1: + pattern = token_list[0] + minAddr = None + maxAddr = None + else: + pattern = token_list[-1] + addr, size = self.input_address_range(token_list[:-1], pid, tid) + minAddr = addr + maxAddr = addr + size + iter = process.search_hexa(pattern) + if process.get_bits() == 32: + addr_width = 8 + else: + addr_width = 16 + for addr, bytes in iter: + print(HexDump.hexblock(bytes, addr, addr_width),) + + do_sh = do_searchhex + +# # def do_strings(self, arg): +# # """ +# # [~process] strings - extract ASCII strings from memory +# # """ +# # if arg: +# # raise CmdError("too many arguments") +# # pid, tid = self.get_process_and_thread_ids_from_prefix() +# # process = self.get_process(pid) +# # for addr, size, data in process.strings(): +# # print("%s: %r" % (HexDump.address(addr), data) + + def do_d(self, arg): + """ + [~thread] d - show memory contents + [~thread] d - show memory contents + [~thread] d - show memory contents + [~process] d
<address> - show memory contents + [~process] d <address-address> - show memory contents + [~process] d <address> <size>
    - show memory contents + """ + return self.last_display_command(arg) + + def do_db(self, arg): + """ + [~thread] db - show memory contents as bytes + [~thread] db - show memory contents as bytes + [~thread] db - show memory contents as bytes + [~process] db
<address> - show memory contents as bytes + [~process] db <address-address> - show memory contents as bytes + [~process] db
<address> <size> - show memory contents as bytes + """ + self.print_memory_display(arg, HexDump.hexblock) + self.last_display_command = self.do_db + + def do_dw(self, arg): + """ + [~thread] dw <register> - show memory contents as words + [~thread] dw <register-register> - show memory contents as words + [~thread] dw <register> <size> - show memory contents as words + [~process] dw
<address> - show memory contents as words + [~process] dw <address-address> - show memory contents as words + [~process] dw
<address> <size> - show memory contents as words + """ + self.print_memory_display(arg, HexDump.hexblock_word) + self.last_display_command = self.do_dw + + def do_dd(self, arg): + """ + [~thread] dd <register> - show memory contents as dwords + [~thread] dd <register-register> - show memory contents as dwords + [~thread] dd <register> <size> - show memory contents as dwords + [~process] dd
<address> - show memory contents as dwords + [~process] dd <address-address> - show memory contents as dwords + [~process] dd
<address> <size> - show memory contents as dwords + """ + self.print_memory_display(arg, HexDump.hexblock_dword) + self.last_display_command = self.do_dd + + def do_dq(self, arg): + """ + [~thread] dq <register> - show memory contents as qwords + [~thread] dq <register-register> - show memory contents as qwords + [~thread] dq <register> <size> - show memory contents as qwords + [~process] dq
<address> - show memory contents as qwords + [~process] dq <address-address> - show memory contents as qwords + [~process] dq
<address> <size> - show memory contents as qwords + """ + self.print_memory_display(arg, HexDump.hexblock_qword) + self.last_display_command = self.do_dq + + # XXX TODO + # Change the way the default is used with ds and du + + def do_ds(self, arg): + """ + [~thread] ds <register> - show memory contents as ANSI string + [~process] ds
<address> - show memory contents as ANSI string + """ + if not arg: + arg = self.default_display_target + token_list = self.split_tokens(arg, 1, 1) + pid, tid, address, size = self.input_display(token_list, 256) + process = self.get_process(pid) + data = process.peek_string(address, False, size) + if data: + print(repr(data)) + self.last_display_command = self.do_ds + + def do_du(self, arg): + """ + [~thread] du <register> - show memory contents as Unicode string + [~process] du
<address> - show memory contents as Unicode string + """ + if not arg: + arg = self.default_display_target + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_display(token_list, 256) + process = self.get_process(pid) + data = process.peek_string(address, True, size) + if data: + print(repr(data)) + self.last_display_command = self.do_du + + def do_register(self, arg): + """ + [~thread] r - print the value of all registers + [~thread] r <register> - print the value of a register + [~thread] r <register>=<value> - change the value of a register + [~thread] register - print the value of all registers + [~thread] register <register> - print the value of a register + [~thread] register <register>=<value> - change the value of a register + """ + arg = arg.strip() + if not arg: + self.print_current_location() + else: + equ = arg.find('=') + if equ >= 0: + register = arg[:equ].strip() + value = arg[equ + 1:].strip() + if not value: + value = '0' + self.change_register(register, value) + else: + value = self.input_register(arg) + if value is None: + raise CmdError("unknown register: %s" % arg) + try: + label = None + thread = self.get_thread_from_prefix() + process = thread.get_process() + module = process.get_module_at_address(value) + if module: + label = module.get_label_at_address(value) + except RuntimeError: + label = None + reg = arg.upper() + val = HexDump.address(value) + if label: + print("%s: %s (%s)" % (reg, val, label)) + else: + print("%s: %s" % (reg, val)) + + do_r = do_register + + def do_eb(self, arg): + """ + [~process] eb <address> <data>
    - write the data to the specified address + """ + # TODO + # data parameter should be optional, use a child Cmd here + pid = self.get_process_id_from_prefix() + token_list = self.split_tokens(arg, 2) + address = self.input_address(token_list[0], pid) + data = HexInput.hexadecimal(' '.join(token_list[1:])) + self.write_memory(address, data, pid) + + # XXX TODO + # add ew, ed and eq here + + def do_find(self, arg): + """ + [~process] f - find the string in the process memory + [~process] find - find the string in the process memory + """ + if not arg: + raise CmdError("missing parameter: string") + process = self.get_process_from_prefix() + self.find_in_memory(arg, process) + + do_f = do_find + + def do_memory(self, arg): + """ + [~process] m - show the process memory map + [~process] memory - show the process memory map + """ + if arg: # TODO: take min and max addresses + raise CmdError("too many arguments") + process = self.get_process_from_prefix() + try: + memoryMap = process.get_memory_map() + mappedFilenames = process.get_mapped_filenames() + print('') + print(CrashDump.dump_memory_map(memoryMap, mappedFilenames)) + except WindowsError: + msg = "can't get memory information for process (%d)" + raise CmdError(msg % process.get_pid()) + + do_m = do_memory + +#------------------------------------------------------------------------------ +# Event handling + +# TODO +# * add configurable stop/don't stop behavior on events and exceptions + + # Stop for all events, unless stated otherwise. + def event(self, event): + self.print_event(event) + self.prompt_user() + + # Stop for all exceptions, unless stated otherwise. + def exception(self, event): + self.print_exception(event) + self.prompt_user() + + # Stop for breakpoint exceptions. + def breakpoint(self, event): + if hasattr(event, 'breakpoint') and event.breakpoint: + self.print_breakpoint_location(event) + else: + self.print_exception(event) + self.prompt_user() + + # Stop for WOW64 breakpoint exceptions. + def wow64_breakpoint(self, event): + self.print_exception(event) + self.prompt_user() + + # Stop for single step exceptions. + def single_step(self, event): + if event.debug.is_tracing(event.get_tid()): + self.print_breakpoint_location(event) + else: + self.print_exception(event) + self.prompt_user() + + # Don't stop for C++ exceptions. + def ms_vc_exception(self, event): + self.print_exception(event) + event.continueStatus = win32.DBG_CONTINUE + + # Don't stop for process start. + def create_process(self, event): + self.print_process_start(event) + self.print_thread_start(event) + self.print_module_load(event) + + # Don't stop for process exit. + def exit_process(self, event): + self.print_process_end(event) + + # Don't stop for thread creation. + def create_thread(self, event): + self.print_thread_start(event) + + # Don't stop for thread exit. + def exit_thread(self, event): + self.print_thread_end(event) + + # Don't stop for DLL load. + def load_dll(self, event): + self.print_module_load(event) + + # Don't stop for DLL unload. + def unload_dll(self, event): + self.print_module_unload(event) + + # Don't stop for debug strings. 
+ def output_string(self, event): + self.print_debug_string(event) + +#------------------------------------------------------------------------------ +# History file + + def load_history(self): + global readline + if readline is None: + try: + import readline + except ImportError: + return + if self.history_file_full_path is None: + folder = os.environ.get('USERPROFILE', '') + if not folder: + folder = os.environ.get('HOME', '') + if not folder: + folder = os.path.split(sys.argv[0])[1] + if not folder: + folder = os.path.curdir + self.history_file_full_path = os.path.join(folder, + self.history_file) + try: + if os.path.exists(self.history_file_full_path): + readline.read_history_file(self.history_file_full_path) + except IOError: + e = sys.exc_info()[1] + warnings.warn("Cannot load history file, reason: %s" % str(e)) + + def save_history(self): + if self.history_file_full_path is not None: + global readline + if readline is None: + try: + import readline + except ImportError: + return + try: + readline.write_history_file(self.history_file_full_path) + except IOError: + e = sys.exc_info()[1] + warnings.warn("Cannot save history file, reason: %s" % str(e)) + +#------------------------------------------------------------------------------ +# Main loop + + # Debugging loop. + def loop(self): + self.debuggerExit = False + debug = self.debug + + # Stop on the initial event, if any. + if self.lastEvent is not None: + self.cmdqueue.append('r') + self.prompt_user() + + # Loop until the debugger is told to quit. + while not self.debuggerExit: + + try: + + # If for some reason the last event wasn't continued, + # continue it here. This won't be done more than once + # for a given Event instance, though. + try: + debug.cont() + # On error, show the command prompt. + except Exception: + traceback.print_exc() + self.prompt_user() + + # While debugees are attached, handle debug events. + # Some debug events may cause the command prompt to be shown. + if self.debug.get_debugee_count() > 0: + try: + + # Get the next debug event. + debug.wait() + + # Dispatch the debug event. + try: + debug.dispatch() + + # Continue the debug event. + finally: + debug.cont() + + # On error, show the command prompt. + except Exception: + traceback.print_exc() + self.prompt_user() + + # While no debugees are attached, show the command prompt. + else: + self.prompt_user() + + # When the user presses Ctrl-C send a debug break to all debugees. + except KeyboardInterrupt: + success = False + try: + print("*** User requested debug break") + system = debug.system + for pid in debug.get_debugee_pids(): + try: + system.get_process(pid).debug_break() + success = True + except: + traceback.print_exc() + except: + traceback.print_exc() + if not success: + raise # This should never happen! 
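For reference, the partial-register writes performed by change_register() above all follow the same mask-and-merge pattern: keep the bits of the old context value that the alias does not cover, then OR in the new bits at the right position. Below is a minimal, self-contained sketch of that idea in plain Python; it does not depend on winappdbg, and the helper name set_partial_register and the sample values are illustrative only.

# Illustrative sketch only -- mirrors the mask-and-merge idea used by
# change_register() for 16-bit and 8-bit register aliases (e.g. writing
# 'ah' must leave the rest of Eax untouched). Not part of winappdbg.

def set_partial_register(full_value, new_value, mask, shift=0):
    """Return full_value with the bits selected by mask replaced by new_value."""
    return (full_value & ~mask) | ((new_value << shift) & mask)

if __name__ == '__main__':
    eax = 0x11223344
    print(hex(set_partial_register(eax, 0xBEEF, 0x0000FFFF)))     # ax -> 0x1122beef
    print(hex(set_partial_register(eax, 0xAB,   0x0000FF00, 8)))  # ah -> 0x1122ab44
    print(hex(set_partial_register(eax, 0xCD,   0x000000FF)))     # al -> 0x112233cd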
diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/module.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/module.py new file mode 120000 index 0000000..d06e3c2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/module.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/69/03/97/70acedcc1cc884136e11b4af68d67ec6d0c446f896ca74736d25697acc \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py new file mode 120000 index 0000000..6e69c09 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/56/25/ef/3f0c04cf6fc42ab1be9b289c142aed5238abace982a27dfa02379423bf \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/registry.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/registry.py new file mode 120000 index 0000000..eadf145 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/registry.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/06/64/e1/2dfe53697b0f117ce4d7a3eab591c802b64aa522bb7f5b2b1bca05a49e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py new file mode 120000 index 0000000..1bc7657 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/36/e5/28/ca853b94c668be26e06488e44cab51b31595b98dc54587ce26270cefe9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/sql.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/sql.py new file mode 120000 index 0000000..c93110d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/sql.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bc/79/ce/1da6ee14732ae0906eeea7fd3baa2652b4933ac386c5dc6383cd5fbeec \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/system.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/system.py new file mode 120000 index 0000000..9484d50 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/system.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2f/9a/19/ab29faf249280e91673982646887538c595ed7fb413f06933df70acefa \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/textio.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/textio.py new file mode 120000 index 0000000..2361dbb --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/textio.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a2/c5/05/1eac6864174640b99fbcf5c416292f34f63f25b890b9be4ddbfb6079ed \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py new file mode 120000 index 0000000..f776694 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b8/4b/97/9bcc52ab88b064d7f05bac179eb4ef1787e9be4613cfef95414610da68 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/util.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/util.py new file mode 120000 index 0000000..95fcfac --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/util.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f4/61/43/e763c63a0d1ccc9734a1496913cb132551d99e66a6f38b5eabddfcc5e8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py new file mode 120000 index 0000000..3614371 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2e/33/cb/434a6feab7074266f7da3575d9b194c3f093b22d1ab5f36e3af920da77 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4ca9392 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/advapi32.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/advapi32.cpython-38.pyc new file mode 100644 index 0000000..516a289 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/advapi32.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_amd64.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_amd64.cpython-38.pyc new file mode 100644 index 0000000..b45f2f5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_amd64.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_i386.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_i386.cpython-38.pyc new file mode 100644 index 0000000..d89fab4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/context_i386.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/dbghelp.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/dbghelp.cpython-38.pyc new file mode 100644 index 0000000..19b66b1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/dbghelp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/defines.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/defines.cpython-38.pyc new file mode 100644 index 0000000..d115cea Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/defines.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/gdi32.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/gdi32.cpython-38.pyc new file mode 100644 index 0000000..f8df989 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/gdi32.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/kernel32.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/kernel32.cpython-38.pyc new file mode 100644 index 0000000..fd54f92 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/kernel32.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/ntdll.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/ntdll.cpython-38.pyc new file mode 100644 index 0000000..c90a42f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/ntdll.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/peb_teb.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/peb_teb.cpython-38.pyc new file mode 100644 index 0000000..a5f0c7a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/peb_teb.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/psapi.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/psapi.cpython-38.pyc new file mode 100644 index 0000000..425de42 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/psapi.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shell32.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shell32.cpython-38.pyc new file mode 100644 index 0000000..3872501 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shell32.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shlwapi.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shlwapi.cpython-38.pyc new file mode 100644 index 0000000..767627c Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/shlwapi.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/user32.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/user32.cpython-38.pyc new file mode 100644 index 0000000..d60f07a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/user32.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..8d2fb59 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/wtsapi32.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/wtsapi32.cpython-38.pyc new file mode 100644 index 0000000..722267d Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__pycache__/wtsapi32.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py new file mode 120000 index 0000000..95b2fba --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d8/41/6f/a9ebb15068a20c64f3907315e1f11e0d9bccc66ac1c83e61bbd0a533c0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py new file mode 120000 index 0000000..9522591 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f1/e4/f5/fc6be67e9448d87596a9d3434498c0e93be9c1597c650715bd264508e6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py new file mode 120000 index 0000000..a6d4ce8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/08/b0/f9/440422ebce857c2b346eccc4821fd9545223f98ad412df0746f1c4e471 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py new file mode 120000 index 0000000..8d91853 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ea/66/40/e6c0ef1ab21f3fbebe26fe9bbc860e3777234ebb98b76f42c6ed9029d4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py new file mode 120000 index 0000000..8b39e4e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/55/ea/dc/d342a76375d2c48d0a3296fcd54e0fea4e8c50709ef8d82cc651bb21ef \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py new file mode 120000 index 0000000..b81a940 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/2f/11/a46e37c0430061b7018c4c0969e9ab561ae517018a8642fde3b091fbb1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py new file mode 120000 index 0000000..f7cccb1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4d/e4/6c/00383b799375770eda19ea5b3886686c69a22b375d4fe6ca119f7b6d02 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py new file mode 120000 index 0000000..ec0c2a5 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ec/f4/ae/9e5d62c59a39cbe6e897ec39de21381a12843d8bfb291888f7f0a23a6b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py new file mode 120000 index 0000000..3f782cd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2a/30/97/d18b5efe0eedcb6e3485e85505b69dc66848d8cfbe6d5bb449f84a4bd1 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py new file mode 120000 index 0000000..f0f736b --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/57/99/fd/1d22e7d1fbf9ab07bcdf332318605c4de276c282734bf85d8c6421a6ce \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py new file mode 120000 index 0000000..2ac4f35 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4f/63/00/3e1282a867e0fc9c581999160a1ce50ed0ec44079ccec2fc7c68526030 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py new file mode 120000 index 0000000..7b7e543 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c7/b8/d5/e7ff7d1ab654d30943f9e3d478cd3d52afcf27b2f533d630b9c20c504c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py new file mode 120000 index 0000000..ad11f5a --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f9/86/1d/56b3155195a9ccc20151b78b90d96143b542061be358b759b4df6ed15a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py new file mode 120000 index 0000000..08104b0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b8/1b/6b/3e8b9bc0cbffd422d27f8da4a4f40e88eae29c2b58264c56ab33a5d3d2 \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py new file mode 120000 index 0000000..dc5348c --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3f/ee/09/20fdfcaed6d0fa21ed763481b2deb9ea67c178510158f6723ab083fdcd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py new file mode 120000 index 0000000..99ff60f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/13/bc/ce/ac92975e6d8e9d1a7f5efb7cebcfc1c0455d87fdfd7ada834206582e34 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.cpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.cpp new file mode 120000 index 0000000..e912a24 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.cpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a9/12/55/769b44caf66d33bd31d17992906865dd4dbc118059f73081dbe196de91 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.h b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.h new file mode 120000 index 0000000..9eb769e --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.h @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ad/60/40/de47737f21da13d5fd0e2b9bddca23a099578c2dd32a72d0bfe9c609e0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/compile_windows.bat b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/compile_windows.bat new file mode 120000 index 0000000..2e32bd9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/compile_windows.bat @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/61/ad/dd/66389b7959e437e3375d7e521ff98b1982937292426524254b12fa6ba0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/inject_dll.cpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/inject_dll.cpp new file mode 120000 index 0000000..7391b16 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/inject_dll.cpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/19/09/99/6e934144c316d5615c0c53a525a18bcb5fa8678a82088ca8e5ec4f2c13 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/py_win_helpers.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/py_win_helpers.hpp new file mode 120000 index 0000000..1341e07 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/py_win_helpers.hpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/9a/4e/f9cd687ee6f89e7d1763d90cb139b7b38f3f10f13e55602195ddfbd382 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_in_memory.hpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_in_memory.hpp new file mode 120000 index 0000000..2d314c2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_in_memory.hpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ce/f0/c2/f5c54667a05712ccff226f90308b9b770eaf5f7042220d93d757157ef8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_on_dllmain.cpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_on_dllmain.cpp new file mode 120000 index 0000000..5d396e4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_on_dllmain.cpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/02/94/33/6bc649adf7b65388d324f6bc22dcef9c773bc361ba62b7eae014ffe7a8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.cpp b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.cpp new file mode 120000 index 0000000..948ad75 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.cpp @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/34/ef/2a/95ceec294ea81c0fc09d7252a1d1eea592e2ce9de7a414dcf841a9063f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.h b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.h new file mode 120000 index 0000000..b177ff9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.h @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/97/4b/44/231c62bf45eb21e889db379d4dde1f62b765c554caf040a9064f3cd563 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/targetver.h b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/targetver.h new file mode 120000 index 0000000..fd86666 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/targetver.h @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7e/ac/3e/88fa291bf57ea454717f7de55452f4baf59f0d4076932d1b89b15b94ad \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_file_utils.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_file_utils.py new file mode 100644 index 0000000..ffe0d7f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_file_utils.py @@ -0,0 +1,911 @@ +r''' + This module provides utilities to get the absolute filenames so that we can be sure that: + - The case of a file will match the actual file in the filesystem (otherwise breakpoints won't be hit). 
+ - Providing means for the user to make path conversions when doing a remote debugging session in + one machine and debugging in another. + + To do that, the PATHS_FROM_ECLIPSE_TO_PYTHON constant must be filled with the appropriate paths. + + @note: + in this context, the server is where your python process is running + and the client is where eclipse is running. + + E.g.: + If the server (your python process) has the structure + /user/projects/my_project/src/package/module1.py + + and the client has: + c:\my_project\src\package\module1.py + + the PATHS_FROM_ECLIPSE_TO_PYTHON would have to be: + PATHS_FROM_ECLIPSE_TO_PYTHON = [(r'c:\my_project\src', r'/user/projects/my_project/src')] + + alternatively, this can be set with an environment variable from the command line: + set PATHS_FROM_ECLIPSE_TO_PYTHON=[['c:\my_project\src','/user/projects/my_project/src']] + + @note: DEBUG_CLIENT_SERVER_TRANSLATION can be set to True to debug the result of those translations + + @note: the case of the paths is important! Note that this can be tricky to get right when one machine + uses a case-independent filesystem and the other uses a case-dependent filesystem (if the system being + debugged is case-independent, 'normcase()' should be used on the paths defined in PATHS_FROM_ECLIPSE_TO_PYTHON). + + @note: all the paths with breakpoints must be translated (otherwise they won't be found in the server) + + @note: to enable remote debugging in the target machine (pydev extensions in the eclipse installation) + import pydevd;pydevd.settrace(host, stdoutToServer, stderrToServer, port, suspend) + + see parameter docs on pydevd.py + + @note: for doing a remote debugging session, all the pydevd_ files must be on the server accessible + through the PYTHONPATH (and the PATHS_FROM_ECLIPSE_TO_PYTHON only needs to be set on the target + machine for the paths that'll actually have breakpoints). +''' + +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_constants import DebugInfoHolder, IS_WINDOWS, IS_JYTHON, \ + DISABLE_FILE_VALIDATION, is_true_in_env +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydevd_bundle.pydevd_comm_constants import file_system_encoding, filesystem_encoding_is_utf8 +from _pydev_bundle.pydev_log import error_once + +import json +import os.path +import sys +import itertools +import ntpath +from functools import partial + +_nt_os_normcase = ntpath.normcase +os_path_basename = os.path.basename +os_path_exists = os.path.exists +join = os.path.join + +try: + FileNotFoundError +except NameError: + FileNotFoundError = IOError # noqa + +try: + os_path_real_path = os.path.realpath # @UndefinedVariable +except: + # jython does not support os.path.realpath + # realpath is a no-op on systems without islink support + os_path_real_path = os.path.abspath + + +def _get_library_dir(): + library_dir = None + try: + import sysconfig + library_dir = sysconfig.get_path('purelib') + except ImportError: + pass # i.e.: Only 2.7 onwards + + if library_dir is None or not os_path_exists(library_dir): + for path in sys.path: + if os_path_exists(path) and os.path.basename(path) == 'site-packages': + library_dir = path + break + + if library_dir is None or not os_path_exists(library_dir): + library_dir = os.path.dirname(os.__file__) + + return library_dir + + +# Note: we can't call sysconfig.get_path from _apply_func_and_normalize_case (it deadlocks on Python 2.7) so, we +# need to get the library dir during module loading. 
+_library_dir = _get_library_dir() + +# defined as a list of tuples where the 1st element of the tuple is the path in the client machine +# and the 2nd element is the path in the server machine. +# see module docstring for more details. +try: + PATHS_FROM_ECLIPSE_TO_PYTHON = json.loads(os.environ.get('PATHS_FROM_ECLIPSE_TO_PYTHON', '[]')) +except Exception: + pydev_log.critical('Error loading PATHS_FROM_ECLIPSE_TO_PYTHON from environment variable.') + pydev_log.exception() + PATHS_FROM_ECLIPSE_TO_PYTHON = [] +else: + if not isinstance(PATHS_FROM_ECLIPSE_TO_PYTHON, list): + pydev_log.critical('Expected PATHS_FROM_ECLIPSE_TO_PYTHON loaded from environment variable to be a list.') + PATHS_FROM_ECLIPSE_TO_PYTHON = [] + else: + # Converting json lists to tuple + PATHS_FROM_ECLIPSE_TO_PYTHON = [tuple(x) for x in PATHS_FROM_ECLIPSE_TO_PYTHON] + +# example: +# PATHS_FROM_ECLIPSE_TO_PYTHON = [ +# (r'd:\temp\temp_workspace_2\test_python\src\yyy\yyy', +# r'd:\temp\temp_workspace_2\test_python\src\hhh\xxx') +# ] + +convert_to_long_pathname = lambda filename:filename +convert_to_short_pathname = lambda filename:filename +get_path_with_real_case = lambda filename:filename + +if sys.platform == 'win32': + try: + import ctypes + from ctypes.wintypes import MAX_PATH, LPCWSTR, LPWSTR, DWORD + + GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW # noqa + GetLongPathName.argtypes = [LPCWSTR, LPWSTR, DWORD] + GetLongPathName.restype = DWORD + + GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW # noqa + GetShortPathName.argtypes = [LPCWSTR, LPWSTR, DWORD] + GetShortPathName.restype = DWORD + + def _convert_to_long_pathname(filename): + buf = ctypes.create_unicode_buffer(MAX_PATH) + + rv = GetLongPathName(filename, buf, MAX_PATH) + if rv != 0 and rv <= MAX_PATH: + filename = buf.value + + return filename + + def _convert_to_short_pathname(filename): + buf = ctypes.create_unicode_buffer(MAX_PATH) + + rv = GetShortPathName(filename, buf, MAX_PATH) + if rv != 0 and rv <= MAX_PATH: + filename = buf.value + + return filename + + # Note that we have a cache for previous list dirs... the only case where this may be an + # issue is if the user actually changes the case of an existing file on windows while + # the debugger is executing (as this seems very unlikely and the cache can save a + # reasonable time -- especially on mapped drives -- it seems nice to have it). 
+ _listdir_cache = {} + + def _resolve_listing(resolved, iter_parts, cache=_listdir_cache): + while True: # Note: while True to make iterative and not recursive + try: + resolve_lowercase = next(iter_parts) # must be lowercase already + except StopIteration: + return resolved + + resolved_lower = resolved.lower() + + resolved_joined = cache.get((resolved_lower, resolve_lowercase)) + if resolved_joined is None: + dir_contents = cache.get(resolved_lower) + if dir_contents is None: + dir_contents = cache[resolved_lower] = os.listdir(resolved) + + for filename in dir_contents: + if filename.lower() == resolve_lowercase: + resolved_joined = os.path.join(resolved, filename) + cache[(resolved_lower, resolve_lowercase)] = resolved_joined + break + else: + raise FileNotFoundError('Unable to find: %s in %s' % ( + resolve_lowercase, resolved)) + + resolved = resolved_joined + + def _get_path_with_real_case(filename): + # Note: this previously made: + # convert_to_long_pathname(convert_to_short_pathname(filename)) + # but this is no longer done because we can't rely on getting the shortname + # consistently (there are settings to disable it on Windows). + # So, using approach which resolves by listing the dir. + + if '~' in filename: + filename = convert_to_long_pathname(filename) + + if filename.startswith('<') or not os_path_exists(filename): + return filename # Not much we can do. + + drive, parts = os.path.splitdrive(os.path.normpath(filename)) + drive = drive.upper() + while parts.startswith(os.path.sep): + parts = parts[1:] + drive += os.path.sep + parts = parts.lower().split(os.path.sep) + + try: + if parts == ['']: + return drive + return _resolve_listing(drive, iter(parts)) + except FileNotFoundError: + _listdir_cache.clear() + # Retry once after clearing the cache we have. + try: + return _resolve_listing(drive, iter(parts)) + except FileNotFoundError: + if os_path_exists(filename): + # This is really strange, ask the user to report as error. + pydev_log.critical( + 'pydev debugger: critical: unable to get real case for file. Details:\n' + 'filename: %s\ndrive: %s\nparts: %s\n' + '(please create a ticket in the tracker to address this).', + filename, drive, parts + ) + pydev_log.exception() + # Don't fail, just return the original file passed. + return filename + + # Check that it actually works + _get_path_with_real_case(__file__) + except: + # Something didn't quite work out, leave no-op conversions in place. + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2: + pydev_log.exception() + else: + convert_to_long_pathname = _convert_to_long_pathname + convert_to_short_pathname = _convert_to_short_pathname + get_path_with_real_case = _get_path_with_real_case + +elif IS_JYTHON and IS_WINDOWS: + + def get_path_with_real_case(filename): + from java.io import File # noqa + f = File(filename) + ret = f.getCanonicalPath() + return ret + +if IS_JYTHON: + + def _normcase_windows(filename): + return filename.lower() + +else: + + def _normcase_windows(filename): + # `normcase` doesn't lower case on Python 2 for non-English locale, so we should do it manually. 
+ if '~' in filename: + filename = convert_to_long_pathname(filename) + + filename = _nt_os_normcase(filename) + return filename.lower() + + +def _normcase_linux(filename): + return filename # no-op + + +_filename_normalization = os.environ.get('PYDEVD_FILENAME_NORMALIZATION', '').lower() +if _filename_normalization == 'lower': + # Note: this is mostly for testing (forcing to always lower-case all contents + # internally -- used to mimick Windows normalization on Linux). + + def _normcase_lower(filename): + return filename.lower() + + _default_normcase = _normcase_lower + +elif _filename_normalization == 'none': + # Disable any filename normalization may be an option on Windows if the + # user is having issues under some circumstances. + _default_normcase = _normcase_linux + +elif IS_WINDOWS: + _default_normcase = _normcase_windows + +else: + _default_normcase = _normcase_linux + + +def normcase(s, NORMCASE_CACHE={}): + try: + return NORMCASE_CACHE[s] + except: + normalized = NORMCASE_CACHE[s] = _default_normcase(s) + return normalized + + +_ide_os = 'WINDOWS' if IS_WINDOWS else 'UNIX' + +_normcase_from_client = normcase + + +def normcase_from_client(s): + return _normcase_from_client(s) + + +DEBUG_CLIENT_SERVER_TRANSLATION = os.environ.get('DEBUG_PYDEVD_PATHS_TRANSLATION', 'False').lower() in ('1', 'true') + + +def set_ide_os(os): + ''' + We need to set the IDE os because the host where the code is running may be + actually different from the client (and the point is that we want the proper + paths to translate from the client to the server). + + :param os: + 'UNIX' or 'WINDOWS' + ''' + global _ide_os + global _normcase_from_client + prev = _ide_os + if os == 'WIN': # Apparently PyCharm uses 'WIN' (https://github.com/fabioz/PyDev.Debugger/issues/116) + os = 'WINDOWS' + + assert os in ('WINDOWS', 'UNIX') + + if DEBUG_CLIENT_SERVER_TRANSLATION: + print('pydev debugger: client OS: %s' % (os,)) + + _normcase_from_client = normcase + if os == 'WINDOWS': + + # Client in Windows and server in Unix, we need to normalize the case. + if not IS_WINDOWS: + _normcase_from_client = _normcase_windows + + else: + # Client in Unix and server in Windows, we can't normalize the case. + if IS_WINDOWS: + _normcase_from_client = _normcase_linux + + if prev != os: + _ide_os = os + # We need to (re)setup how the client <-> server translation works to provide proper separators. + setup_client_server_paths(_last_client_server_paths_set) + + +# Caches filled as requested during the debug session. +NORM_PATHS_CONTAINER = {} +NORM_PATHS_AND_BASE_CONTAINER = {} + + +def canonical_normalized_path(filename): + ''' + This returns a filename that is canonical and it's meant to be used internally + to store information on breakpoints and see if there's any hit on it. + + Note that this version is only internal as it may not match the case and + may have symlinks resolved (and thus may not match what the user expects + in the editor). + ''' + return get_abs_path_real_path_and_base_from_file(filename)[1] + + +def absolute_path(filename): + ''' + Provides a version of the filename that's absolute (and NOT normalized). + ''' + return get_abs_path_real_path_and_base_from_file(filename)[0] + + +def basename(filename): + ''' + Provides the basename for a file. 
+ ''' + return get_abs_path_real_path_and_base_from_file(filename)[2] + + +# Returns tuple of absolute path and real path for given filename +def _abs_and_canonical_path(filename, NORM_PATHS_CONTAINER=NORM_PATHS_CONTAINER): + try: + return NORM_PATHS_CONTAINER[filename] + except: + if filename.__class__ != str: + raise AssertionError('Paths passed to _abs_and_canonical_path must be str. Found: %s (%s)' % (filename, type(filename))) + if os is None: # Interpreter shutdown + return filename, filename + + os_path = os.path + if os_path is None: # Interpreter shutdown + return filename, filename + + os_path_abspath = os_path.abspath + os_path_isabs = os_path.isabs + + if os_path_abspath is None or os_path_isabs is None or os_path_real_path is None: # Interpreter shutdown + return filename, filename + + isabs = os_path_isabs(filename) + + if _global_resolve_symlinks: + os_path_abspath = os_path_real_path + + normalize = False + abs_path = _apply_func_and_normalize_case(filename, os_path_abspath, isabs, normalize) + + normalize = True + real_path = _apply_func_and_normalize_case(filename, os_path_real_path, isabs, normalize) + + # cache it for fast access later + NORM_PATHS_CONTAINER[filename] = abs_path, real_path + return abs_path, real_path + + +def _get_relative_filename_abs_path(filename, func, os_path_exists=os_path_exists): + # If we have a relative path and the file does not exist when made absolute, try to + # resolve it based on the sys.path entries. + for p in sys.path: + r = func(os.path.join(p, filename)) + if os_path_exists(r): + return r + + # We couldn't find the real file for the relative path. Resolve it as if it was in + # a library (so that it's considered a library file and not a project file). + r = func(os.path.join(_library_dir, filename)) + return r + + +def _apply_func_and_normalize_case(filename, func, isabs, normalize_case, os_path_exists=os_path_exists, join=join): + if filename.startswith('<'): + # Not really a file, rather a synthetic name like or ; + # shouldn't be normalized. + return filename + + r = func(filename) + + if not isabs: + if not os_path_exists(r): + r = _get_relative_filename_abs_path(filename, func) + + ind = r.find('.zip') + if ind == -1: + ind = r.find('.egg') + if ind != -1: + ind += 4 + zip_path = r[:ind] + inner_path = r[ind:] + if inner_path.startswith('!'): + # Note (fabioz): although I can replicate this by creating a file ending as + # .zip! or .egg!, I don't really know what's the real-world case for this + # (still kept as it was added by @jetbrains, but it should probably be reviewed + # later on). + # Note 2: it goes hand-in-hand with 'exists'. + inner_path = inner_path[1:] + zip_path = zip_path + '!' 
+ + if inner_path.startswith('/') or inner_path.startswith('\\'): + inner_path = inner_path[1:] + if inner_path: + if normalize_case: + r = join(normcase(zip_path), inner_path) + else: + r = join(zip_path, inner_path) + return r + + if normalize_case: + r = normcase(r) + return r + + +_ZIP_SEARCH_CACHE = {} +_NOT_FOUND_SENTINEL = object() + + +def exists(filename): + if os_path_exists(filename): + return True + + if not os.path.isabs(filename): + filename = _get_relative_filename_abs_path(filename, os.path.abspath) + if os_path_exists(filename): + return True + + ind = filename.find('.zip') + if ind == -1: + ind = filename.find('.egg') + + if ind != -1: + ind += 4 + zip_path = filename[:ind] + inner_path = filename[ind:] + if inner_path.startswith("!"): + # Note (fabioz): although I can replicate this by creating a file ending as + # .zip! or .egg!, I don't really know what's the real-world case for this + # (still kept as it was added by @jetbrains, but it should probably be reviewed + # later on). + # Note 2: it goes hand-in-hand with '_apply_func_and_normalize_case'. + inner_path = inner_path[1:] + zip_path = zip_path + '!' + + zip_file_obj = _ZIP_SEARCH_CACHE.get(zip_path, _NOT_FOUND_SENTINEL) + if zip_file_obj is None: + return False + elif zip_file_obj is _NOT_FOUND_SENTINEL: + try: + import zipfile + zip_file_obj = zipfile.ZipFile(zip_path, 'r') + _ZIP_SEARCH_CACHE[zip_path] = zip_file_obj + except: + _ZIP_SEARCH_CACHE[zip_path] = _NOT_FOUND_SENTINEL + return False + + try: + if inner_path.startswith('/') or inner_path.startswith('\\'): + inner_path = inner_path[1:] + + _info = zip_file_obj.getinfo(inner_path.replace('\\', '/')) + + return join(zip_path, inner_path) + except KeyError: + return False + + else: + pydev_log.debug('os.path.exists(%r) returned False.', filename) + + return False + + +try: + report = pydev_log.critical + if DISABLE_FILE_VALIDATION: + report = pydev_log.debug + + try: + code = os_path_real_path.func_code + except AttributeError: + code = os_path_real_path.__code__ + + if code.co_filename.startswith(' in this case). + f = '' + + if f.startswith('<'): + return f, normcase(f), f + + if _abs_and_canonical_path is None: # Interpreter shutdown + i = max(f.rfind('/'), f.rfind('\\')) + return (f, f, f[i + 1:]) + + if f is not None: + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + + abs_path, canonical_normalized_filename = _abs_and_canonical_path(f) + + try: + base = os_path_basename(canonical_normalized_filename) + except AttributeError: + # Error during shutdown. + i = max(f.rfind('/'), f.rfind('\\')) + base = f[i + 1:] + ret = abs_path, canonical_normalized_filename, base + NORM_PATHS_AND_BASE_CONTAINER[filename] = ret + return ret + + +def get_abs_path_real_path_and_base_from_frame(frame, NORM_PATHS_AND_BASE_CONTAINER=NORM_PATHS_AND_BASE_CONTAINER): + try: + return NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + # This one is just internal (so, does not need any kind of client-server translation) + f = frame.f_code.co_filename + + if f is not None and f.startswith (('build/bdist.', 'build\\bdist.')): + # files from eggs in Python 2.7 have paths like build/bdist.linux-x86_64/egg/ + f = frame.f_globals['__file__'] + + if get_abs_path_real_path_and_base_from_file is None: + # Interpreter shutdown + if not f: + # i.e.: it's possible that the user compiled code with an empty string (consider + # it as in this case). 
+ f = '' + i = max(f.rfind('/'), f.rfind('\\')) + return f, f, f[i + 1:] + + ret = get_abs_path_real_path_and_base_from_file(f) + # Also cache based on the frame.f_code.co_filename (if we had it inside build/bdist it can make a difference). + NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] = ret + return ret + + +def get_fullname(mod_name): + import pkgutil + try: + loader = pkgutil.get_loader(mod_name) + except: + return None + if loader is not None: + for attr in ("get_filename", "_get_filename"): + meth = getattr(loader, attr, None) + if meth is not None: + return meth(mod_name) + return None + + +def get_package_dir(mod_name): + for path in sys.path: + mod_path = join(path, mod_name.replace('.', '/')) + if os.path.isdir(mod_path): + return mod_path + return None + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__init__.py new file mode 120000 index 0000000..7075117 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/de/b6/43/3214a6a42137cbe8a98cf8d0ad4bf60889b9a21082a6b48952832b72a4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a303e85 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/django_debug.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/django_debug.cpython-38.pyc new file mode 100644 index 0000000..63e9af1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/django_debug.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/jinja2_debug.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/jinja2_debug.cpython-38.pyc new file mode 100644 index 0000000..665e765 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/jinja2_debug.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/pydevd_line_validation.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/pydevd_line_validation.cpython-38.pyc new file mode 100644 index 0000000..27c7cd6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/__pycache__/pydevd_line_validation.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/django_debug.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/django_debug.py new file mode 100644 index 0000000..ff7f1eb --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/django_debug.py @@ -0,0 +1,613 @@ +import inspect + +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK +from _pydevd_bundle.pydevd_constants 
import STATE_SUSPEND, DJANGO_SUSPEND, \ + DebugInfoHolder +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode, just_raised, ignore_exception_trace +from pydevd_file_utils import canonical_normalized_path, absolute_path +from _pydevd_bundle.pydevd_api import PyDevdAPI +from pydevd_plugins.pydevd_line_validation import LineBreakpointWithLazyValidation, ValidationInfo +from _pydev_bundle.pydev_override import overrides + +IS_DJANGO18 = False +IS_DJANGO19 = False +IS_DJANGO19_OR_HIGHER = False +try: + import django + version = django.VERSION + IS_DJANGO18 = version[0] == 1 and version[1] == 8 + IS_DJANGO19 = version[0] == 1 and version[1] == 9 + IS_DJANGO19_OR_HIGHER = ((version[0] == 1 and version[1] >= 9) or version[0] > 1) +except: + pass + + +class DjangoLineBreakpoint(LineBreakpointWithLazyValidation): + + def __init__(self, canonical_normalized_filename, breakpoint_id, line, condition, func_name, expression, hit_condition=None, is_logpoint=False): + self.canonical_normalized_filename = canonical_normalized_filename + LineBreakpointWithLazyValidation.__init__(self, breakpoint_id, line, condition, func_name, expression, hit_condition=hit_condition, is_logpoint=is_logpoint) + + def __str__(self): + return "DjangoLineBreakpoint: %s-%d" % (self.canonical_normalized_filename, self.line) + + +class _DjangoValidationInfo(ValidationInfo): + + @overrides(ValidationInfo._collect_valid_lines_in_template_uncached) + def _collect_valid_lines_in_template_uncached(self, template): + lines = set() + for node in self._iternodes(template.nodelist): + if node.__class__.__name__ in _IGNORE_RENDER_OF_CLASSES: + continue + lineno = self._get_lineno(node) + if lineno is not None: + lines.add(lineno) + return lines + + def _get_lineno(self, node): + if hasattr(node, 'token') and hasattr(node.token, 'lineno'): + return node.token.lineno + return None + + def _iternodes(self, nodelist): + for node in nodelist: + yield node + + try: + children = node.child_nodelists + except: + pass + else: + for attr in children: + nodelist = getattr(node, attr, None) + if nodelist: + # i.e.: yield from _iternodes(nodelist) + for node in self._iternodes(nodelist): + yield node + + +def add_line_breakpoint(plugin, pydb, type, canonical_normalized_filename, breakpoint_id, line, condition, expression, func_name, hit_condition=None, is_logpoint=False, add_breakpoint_result=None, on_changed_breakpoint_state=None): + if type == 'django-line': + django_line_breakpoint = DjangoLineBreakpoint(canonical_normalized_filename, breakpoint_id, line, condition, func_name, expression, hit_condition=hit_condition, is_logpoint=is_logpoint) + if not hasattr(pydb, 'django_breakpoints'): + _init_plugin_breaks(pydb) + + if IS_DJANGO19_OR_HIGHER: + add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_LAZY_VALIDATION + django_line_breakpoint.add_breakpoint_result = add_breakpoint_result + django_line_breakpoint.on_changed_breakpoint_state = on_changed_breakpoint_state + else: + add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_NO_ERROR + + return django_line_breakpoint, pydb.django_breakpoints + return None + + +def after_breakpoints_consolidated(plugin, py_db, canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints): + if IS_DJANGO19_OR_HIGHER: + django_breakpoints_for_file = file_to_line_to_breakpoints.get(canonical_normalized_filename) + if not django_breakpoints_for_file: + return + + if not hasattr(py_db, 'django_validation_info'): + _init_plugin_breaks(py_db) + + # In general we 
validate the breakpoints only when the template is loaded, but if the template + # was already loaded, we can validate the breakpoints based on the last loaded value. + py_db.django_validation_info.verify_breakpoints_from_template_cached_lines( + py_db, canonical_normalized_filename, django_breakpoints_for_file) + + +def add_exception_breakpoint(plugin, pydb, type, exception): + if type == 'django': + if not hasattr(pydb, 'django_exception_break'): + _init_plugin_breaks(pydb) + pydb.django_exception_break[exception] = True + return True + return False + + +def _init_plugin_breaks(pydb): + pydb.django_exception_break = {} + pydb.django_breakpoints = {} + + pydb.django_validation_info = _DjangoValidationInfo() + + +def remove_exception_breakpoint(plugin, pydb, type, exception): + if type == 'django': + try: + del pydb.django_exception_break[exception] + return True + except: + pass + return False + + +def remove_all_exception_breakpoints(plugin, pydb): + if hasattr(pydb, 'django_exception_break'): + pydb.django_exception_break = {} + return True + return False + + +def get_breakpoints(plugin, pydb, type): + if type == 'django-line': + return pydb.django_breakpoints + return None + + +def _inherits(cls, *names): + if cls.__name__ in names: + return True + inherits_node = False + for base in inspect.getmro(cls): + if base.__name__ in names: + inherits_node = True + break + return inherits_node + + +_IGNORE_RENDER_OF_CLASSES = ('TextNode', 'NodeList') + + +def _is_django_render_call(frame, debug=False): + try: + name = frame.f_code.co_name + if name != 'render': + return False + + if 'self' not in frame.f_locals: + return False + + cls = frame.f_locals['self'].__class__ + + inherits_node = _inherits(cls, 'Node') + + if not inherits_node: + return False + + clsname = cls.__name__ + if IS_DJANGO19: + # in Django 1.9 we need to save the flag that there is included template + if clsname == 'IncludeNode': + if 'context' in frame.f_locals: + context = frame.f_locals['context'] + context._has_included_template = True + + return clsname not in _IGNORE_RENDER_OF_CLASSES + except: + pydev_log.exception() + return False + + +def _is_django_context_get_call(frame): + try: + if 'self' not in frame.f_locals: + return False + + cls = frame.f_locals['self'].__class__ + + return _inherits(cls, 'BaseContext') + except: + pydev_log.exception() + return False + + +def _is_django_resolve_call(frame): + try: + name = frame.f_code.co_name + if name != '_resolve_lookup': + return False + + if 'self' not in frame.f_locals: + return False + + cls = frame.f_locals['self'].__class__ + + clsname = cls.__name__ + return clsname == 'Variable' + except: + pydev_log.exception() + return False + + +def _is_django_suspended(thread): + return thread.additional_info.suspend_type == DJANGO_SUSPEND + + +def suspend_django(main_debugger, thread, frame, cmd=CMD_SET_BREAK): + if frame.f_lineno is None: + return None + + main_debugger.set_suspend(thread, cmd) + thread.additional_info.suspend_type = DJANGO_SUSPEND + + return frame + + +def _find_django_render_frame(frame): + while frame is not None and not _is_django_render_call(frame): + frame = frame.f_back + + return frame + +#======================================================================================================================= +# Django Frame +#======================================================================================================================= + + +def _read_file(filename): + # type: (str) -> str + f = open(filename, 'r', encoding='utf-8', 
errors='replace') + s = f.read() + f.close() + return s + + +def _offset_to_line_number(text, offset): + curLine = 1 + curOffset = 0 + while curOffset < offset: + if curOffset == len(text): + return -1 + c = text[curOffset] + if c == '\n': + curLine += 1 + elif c == '\r': + curLine += 1 + if curOffset < len(text) and text[curOffset + 1] == '\n': + curOffset += 1 + + curOffset += 1 + + return curLine + + +def _get_source_django_18_or_lower(frame): + # This method is usable only for the Django <= 1.8 + try: + node = frame.f_locals['self'] + if hasattr(node, 'source'): + return node.source + else: + if IS_DJANGO18: + # The debug setting was changed since Django 1.8 + pydev_log.error_once("WARNING: Template path is not available. Set the 'debug' option in the OPTIONS of a DjangoTemplates " + "backend.") + else: + # The debug setting for Django < 1.8 + pydev_log.error_once("WARNING: Template path is not available. Please set TEMPLATE_DEBUG=True in your settings.py to make " + "django template breakpoints working") + return None + + except: + pydev_log.exception() + return None + + +def _convert_to_str(s): + return s + + +def _get_template_original_file_name_from_frame(frame): + try: + if IS_DJANGO19: + # The Node source was removed since Django 1.9 + if 'context' in frame.f_locals: + context = frame.f_locals['context'] + if hasattr(context, '_has_included_template'): + # if there was included template we need to inspect the previous frames and find its name + back = frame.f_back + while back is not None and frame.f_code.co_name in ('render', '_render'): + locals = back.f_locals + if 'self' in locals: + self = locals['self'] + if self.__class__.__name__ == 'Template' and hasattr(self, 'origin') and \ + hasattr(self.origin, 'name'): + return _convert_to_str(self.origin.name) + back = back.f_back + else: + if hasattr(context, 'template') and hasattr(context.template, 'origin') and \ + hasattr(context.template.origin, 'name'): + return _convert_to_str(context.template.origin.name) + return None + elif IS_DJANGO19_OR_HIGHER: + # For Django 1.10 and later there is much simpler way to get template name + if 'self' in frame.f_locals: + self = frame.f_locals['self'] + if hasattr(self, 'origin') and hasattr(self.origin, 'name'): + return _convert_to_str(self.origin.name) + return None + + source = _get_source_django_18_or_lower(frame) + if source is None: + pydev_log.debug("Source is None\n") + return None + fname = _convert_to_str(source[0].name) + + if fname == '': + pydev_log.debug("Source name is %s\n" % fname) + return None + else: + return fname + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2: + pydev_log.exception('Error getting django template filename.') + return None + + +def _get_template_line(frame): + if IS_DJANGO19_OR_HIGHER: + node = frame.f_locals['self'] + if hasattr(node, 'token') and hasattr(node.token, 'lineno'): + return node.token.lineno + else: + return None + + source = _get_source_django_18_or_lower(frame) + original_filename = _get_template_original_file_name_from_frame(frame) + if original_filename is not None: + try: + absolute_filename = absolute_path(original_filename) + return _offset_to_line_number(_read_file(absolute_filename), source[1][0]) + except: + return None + return None + + +class DjangoTemplateFrame(object): + + IS_PLUGIN_FRAME = True + + def __init__(self, frame): + original_filename = _get_template_original_file_name_from_frame(frame) + self._back_context = frame.f_locals['context'] + self.f_code = FCode('Django Template', original_filename) + 
self.f_lineno = _get_template_line(frame) + self.f_back = frame + self.f_globals = {} + self.f_locals = self._collect_context(self._back_context) + self.f_trace = None + + def _collect_context(self, context): + res = {} + try: + for d in context.dicts: + for k, v in d.items(): + res[k] = v + except AttributeError: + pass + return res + + def _change_variable(self, name, value): + for d in self._back_context.dicts: + for k, v in d.items(): + if k == name: + d[k] = value + + +class DjangoTemplateSyntaxErrorFrame(object): + + IS_PLUGIN_FRAME = True + + def __init__(self, frame, original_filename, lineno, f_locals): + self.f_code = FCode('Django TemplateSyntaxError', original_filename) + self.f_lineno = lineno + self.f_back = frame + self.f_globals = {} + self.f_locals = f_locals + self.f_trace = None + + +def change_variable(plugin, frame, attr, expression): + if isinstance(frame, DjangoTemplateFrame): + result = eval(expression, frame.f_globals, frame.f_locals) + frame._change_variable(attr, result) + return result + return False + + +def _is_django_variable_does_not_exist_exception_break_context(frame): + try: + name = frame.f_code.co_name + except: + name = None + return name in ('_resolve_lookup', 'find_template') + + +def _is_ignoring_failures(frame): + while frame is not None: + if frame.f_code.co_name == 'resolve': + ignore_failures = frame.f_locals.get('ignore_failures') + if ignore_failures: + return True + frame = frame.f_back + + return False + +#======================================================================================================================= +# Django Step Commands +#======================================================================================================================= + + +def can_skip(plugin, main_debugger, frame): + if main_debugger.django_breakpoints: + if _is_django_render_call(frame): + return False + + if main_debugger.django_exception_break: + module_name = frame.f_globals.get('__name__', '') + + if module_name == 'django.template.base': + # Exceptions raised at django.template.base must be checked. 
+ return False + + return True + + +def has_exception_breaks(plugin): + if len(plugin.main_debugger.django_exception_break) > 0: + return True + return False + + +def has_line_breaks(plugin): + for _canonical_normalized_filename, breakpoints in plugin.main_debugger.django_breakpoints.items(): + if len(breakpoints) > 0: + return True + return False + + +def cmd_step_into(plugin, main_debugger, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + if _is_django_suspended(thread): + stop_info['django_stop'] = event == 'call' and _is_django_render_call(frame) + plugin_stop = stop_info['django_stop'] + stop = stop and _is_django_resolve_call(frame.f_back) and not _is_django_context_get_call(frame) + if stop: + info.pydev_django_resolve_frame = True # we remember that we've go into python code from django rendering frame + return stop, plugin_stop + + +def cmd_step_over(plugin, main_debugger, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + if _is_django_suspended(thread): + stop_info['django_stop'] = event == 'call' and _is_django_render_call(frame) + plugin_stop = stop_info['django_stop'] + stop = False + return stop, plugin_stop + else: + if event == 'return' and info.pydev_django_resolve_frame and _is_django_resolve_call(frame.f_back): + # we return to Django suspend mode and should not stop before django rendering frame + info.pydev_step_stop = frame.f_back + info.pydev_django_resolve_frame = False + thread.additional_info.suspend_type = DJANGO_SUSPEND + stop = info.pydev_step_stop is frame and event in ('line', 'return') + return stop, plugin_stop + + +def stop(plugin, main_debugger, frame, event, args, stop_info, arg, step_cmd): + main_debugger = args[0] + thread = args[3] + if 'django_stop' in stop_info and stop_info['django_stop']: + frame = suspend_django(main_debugger, thread, DjangoTemplateFrame(frame), step_cmd) + if frame: + main_debugger.do_wait_suspend(thread, frame, event, arg) + return True + return False + + +def get_breakpoint(plugin, py_db, pydb_frame, frame, event, args): + py_db = args[0] + _filename = args[1] + info = args[2] + breakpoint_type = 'django' + + if event == 'call' and info.pydev_state != STATE_SUSPEND and py_db.django_breakpoints and _is_django_render_call(frame): + original_filename = _get_template_original_file_name_from_frame(frame) + pydev_log.debug("Django is rendering a template: %s", original_filename) + + canonical_normalized_filename = canonical_normalized_path(original_filename) + django_breakpoints_for_file = py_db.django_breakpoints.get(canonical_normalized_filename) + + if django_breakpoints_for_file: + + # At this point, let's validate whether template lines are correct. 
+ if IS_DJANGO19_OR_HIGHER: + django_validation_info = py_db.django_validation_info + context = frame.f_locals['context'] + django_template = context.template + django_validation_info.verify_breakpoints(py_db, canonical_normalized_filename, django_breakpoints_for_file, django_template) + + pydev_log.debug("Breakpoints for that file: %s", django_breakpoints_for_file) + template_line = _get_template_line(frame) + pydev_log.debug("Tracing template line: %s", template_line) + + if template_line in django_breakpoints_for_file: + django_breakpoint = django_breakpoints_for_file[template_line] + new_frame = DjangoTemplateFrame(frame) + return True, django_breakpoint, new_frame, breakpoint_type + + return False, None, None, breakpoint_type + + +def suspend(plugin, main_debugger, thread, frame, bp_type): + if bp_type == 'django': + return suspend_django(main_debugger, thread, DjangoTemplateFrame(frame)) + return None + + +def _get_original_filename_from_origin_in_parent_frame_locals(frame, parent_frame_name): + filename = None + parent_frame = frame + while parent_frame.f_code.co_name != parent_frame_name: + parent_frame = parent_frame.f_back + + origin = None + if parent_frame is not None: + origin = parent_frame.f_locals.get('origin') + + if hasattr(origin, 'name') and origin.name is not None: + filename = _convert_to_str(origin.name) + return filename + + +def exception_break(plugin, main_debugger, pydb_frame, frame, args, arg): + main_debugger = args[0] + thread = args[3] + exception, value, trace = arg + + if main_debugger.django_exception_break and exception is not None: + if exception.__name__ in ['VariableDoesNotExist', 'TemplateDoesNotExist', 'TemplateSyntaxError'] and \ + just_raised(trace) and not ignore_exception_trace(trace): + + if exception.__name__ == 'TemplateSyntaxError': + # In this case we don't actually have a regular render frame with the context + # (we didn't really get to that point). + token = getattr(value, 'token', None) + + if token is None: + # Django 1.7 does not have token in exception. Try to get it from locals. + token = frame.f_locals.get('token') + + lineno = getattr(token, 'lineno', None) + + original_filename = None + if lineno is not None: + original_filename = _get_original_filename_from_origin_in_parent_frame_locals(frame, 'get_template') + + if original_filename is None: + # Django 1.7 does not have origin in get_template. Try to get it from + # load_template. 
+ original_filename = _get_original_filename_from_origin_in_parent_frame_locals(frame, 'load_template') + + if original_filename is not None and lineno is not None: + syntax_error_frame = DjangoTemplateSyntaxErrorFrame( + frame, original_filename, lineno, {'token': token, 'exception': exception}) + + suspend_frame = suspend_django( + main_debugger, thread, syntax_error_frame, CMD_ADD_EXCEPTION_BREAK) + return True, suspend_frame + + elif exception.__name__ == 'VariableDoesNotExist': + if _is_django_variable_does_not_exist_exception_break_context(frame): + if not getattr(exception, 'silent_variable_failure', False) and not _is_ignoring_failures(frame): + render_frame = _find_django_render_frame(frame) + if render_frame: + suspend_frame = suspend_django( + main_debugger, thread, DjangoTemplateFrame(render_frame), CMD_ADD_EXCEPTION_BREAK) + if suspend_frame: + add_exception_to_frame(suspend_frame, (exception, value, trace)) + thread.additional_info.pydev_message = 'VariableDoesNotExist' + suspend_frame.f_back = frame + frame = suspend_frame + return True, frame + + return None diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/README.md b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/README.md new file mode 120000 index 0000000..c48f3ff --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/README.md @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/73/1b/bc/176f79b27515360a84e715ca9d94e96ea6d1dcb4e69bad2980ad08393b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/__init__.py new file mode 120000 index 0000000..ed18dfe --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ae/ab/4d/b1f0dc2a0424703b75c0ad924d72eb207f0dbf536c5ac4077bf983677d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e207192 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__init__.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__init__.py new file mode 120000 index 0000000..ed18dfe --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ae/ab/4d/b1f0dc2a0424703b75c0ad924d72eb207f0dbf536c5ac4077bf983677d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e93c31e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/__init__.cpython-38.pyc differ 
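The django_debug.py hooks above (get_breakpoint, suspend, exception_break) never hand the debugger Django's real rendering frame: they wrap it in a DjangoTemplateFrame (or DjangoTemplateSyntaxErrorFrame) whose f_lineno, f_code and f_locals point at the template file, line and context, so the front-end shows the template rather than Django internals. Below is a minimal, self-contained sketch of that fake-frame idea; the class and helper names are invented for illustration only and the real pydevd classes are considerably richer.

import sys


class FakeTemplateFrame(object):
    """Duck-types the few frame attributes a debugger front-end actually reads."""

    def __init__(self, real_frame, template_filename, template_lineno):
        self.f_back = real_frame                   # keep the real Python frame reachable
        self.f_lineno = template_lineno            # line reported to the user
        self.f_globals = {}
        self.f_locals = dict(real_frame.f_locals)  # surface the template context instead
        self.f_trace = None
        self.template_filename = template_filename


def find_frame_by_name(start_frame, func_name):
    """Walk f_back links until a frame whose code object has the given name."""
    frame = start_frame
    while frame is not None and frame.f_code.co_name != func_name:
        frame = frame.f_back
    return frame


def render():  # stands in for a template render call somewhere on the stack
    return find_frame_by_name(sys._getframe(), 'render')


fake = FakeTemplateFrame(render(), 'index.html', 3)
print(fake.f_lineno, sorted(fake.f_locals))  # -> 3 []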
diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_helpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_helpers.cpython-38.pyc new file mode 100644 index 0000000..94a8b43 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_helpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_numpy_types.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_numpy_types.cpython-38.pyc new file mode 100644 index 0000000..8f2b2b1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_numpy_types.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_pandas_types.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_pandas_types.cpython-38.pyc new file mode 100644 index 0000000..2fbe343 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugin_pandas_types.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugins_django_form_str.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugins_django_form_str.cpython-38.pyc new file mode 100644 index 0000000..fab9f1e Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__pycache__/pydevd_plugins_django_form_str.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py new file mode 120000 index 0000000..f230e03 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/d1/f1/51/eb780638254d587a7e7bc125e8e3e50659634c9c027b5a042565cfbf93 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py new file mode 120000 index 0000000..5520e49 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/60/ac/69/b82320ba66e3702f781a4f75872dbaa1b0cc3a50796770b199a478f7ad \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_pandas_types.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_pandas_types.py new file mode 100644 index 0000000..2c1090c --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_pandas_types.py @@ -0,0 +1,160 @@ +import sys + +from _pydevd_bundle.pydevd_constants import PANDAS_MAX_ROWS, PANDAS_MAX_COLS, PANDAS_MAX_COLWIDTH +from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider, StrPresentationProvider +from _pydevd_bundle.pydevd_resolver import inspect, MethodWrapperType +from _pydevd_bundle.pydevd_utils import Timer + +from .pydevd_helpers import find_mod_attr +from contextlib import contextmanager + + +def _get_dictionary(obj, replacements): + ret = dict() + cls = obj.__class__ + for attr_name in dir(obj): + + # This is interesting but it actually hides too much info from the dataframe. + # attr_type_in_cls = type(getattr(cls, attr_name, None)) + # if attr_type_in_cls == property: + # ret[attr_name] = '' + # continue + + timer = Timer() + try: + replacement = replacements.get(attr_name) + if replacement is not None: + ret[attr_name] = replacement + continue + + attr_value = getattr(obj, attr_name, '') + if inspect.isroutine(attr_value) or isinstance(attr_value, MethodWrapperType): + continue + ret[attr_name] = attr_value + except Exception as e: + ret[attr_name] = '' % (e,) + finally: + timer.report_if_getting_attr_slow(cls, attr_name) + + return ret + + +@contextmanager +def customize_pandas_options(): + # The default repr depends on the settings of: + # + # pandas.set_option('display.max_columns', None) + # pandas.set_option('display.max_rows', None) + # + # which can make the repr **very** slow on some cases, so, we customize pandas to have + # smaller values if the current values are too big. + custom_options = [] + + from pandas import get_option + + max_rows = get_option("display.max_rows") + max_cols = get_option("display.max_columns") + max_colwidth = get_option("display.max_colwidth") + + if max_rows is None or max_rows > PANDAS_MAX_ROWS: + custom_options.append("display.max_rows") + custom_options.append(PANDAS_MAX_ROWS) + + if max_cols is None or max_cols > PANDAS_MAX_COLS: + custom_options.append("display.max_columns") + custom_options.append(PANDAS_MAX_COLS) + + if max_colwidth is None or max_colwidth > PANDAS_MAX_COLWIDTH: + custom_options.append("display.max_colwidth") + custom_options.append(PANDAS_MAX_COLWIDTH) + + if custom_options: + from pandas import option_context + with option_context(*custom_options): + yield + else: + yield + + +class PandasDataFrameTypeResolveProvider(object): + + def can_provide(self, type_object, type_name): + data_frame_class = find_mod_attr('pandas.core.frame', 'DataFrame') + return data_frame_class is not None and issubclass(type_object, data_frame_class) + + def resolve(self, obj, attribute): + return getattr(obj, attribute) + + def get_dictionary(self, obj): + replacements = { + # This actually calls: DataFrame.transpose(), which can be expensive, so, + # let's just add some string representation for it. + 'T': '', + + # This creates a whole new dict{index: Series) for each column. Doing a + # subsequent repr() from this dict can be very slow, so, don't return it. 
+ '_series': '', + + 'style': '', + } + return _get_dictionary(obj, replacements) + + def get_str(self, df): + with customize_pandas_options(): + return repr(df) + + +class PandasSeriesTypeResolveProvider(object): + + def can_provide(self, type_object, type_name): + series_class = find_mod_attr('pandas.core.series', 'Series') + return series_class is not None and issubclass(type_object, series_class) + + def resolve(self, obj, attribute): + return getattr(obj, attribute) + + def get_dictionary(self, obj): + replacements = { + # This actually calls: DataFrame.transpose(), which can be expensive, so, + # let's just add some string representation for it. + 'T': '', + + # This creates a whole new dict{index: Series) for each column. Doing a + # subsequent repr() from this dict can be very slow, so, don't return it. + '_series': '', + + 'style': '', + } + return _get_dictionary(obj, replacements) + + def get_str(self, series): + with customize_pandas_options(): + return repr(series) + + +class PandasStylerTypeResolveProvider(object): + + def can_provide(self, type_object, type_name): + series_class = find_mod_attr('pandas.io.formats.style', 'Styler') + return series_class is not None and issubclass(type_object, series_class) + + def resolve(self, obj, attribute): + return getattr(obj, attribute) + + def get_dictionary(self, obj): + replacements = { + 'data': '', + + '__dict__': '', + } + return _get_dictionary(obj, replacements) + + +if not sys.platform.startswith("java"): + TypeResolveProvider.register(PandasDataFrameTypeResolveProvider) + StrPresentationProvider.register(PandasDataFrameTypeResolveProvider) + + TypeResolveProvider.register(PandasSeriesTypeResolveProvider) + StrPresentationProvider.register(PandasSeriesTypeResolveProvider) + + TypeResolveProvider.register(PandasStylerTypeResolveProvider) diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py new file mode 120000 index 0000000..8cae21f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/aa/ea/ab/4542ae2843e84f7ecc0656bbc09092b961e2a35748d5c10ad46d77a8ed \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/jinja2_debug.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/jinja2_debug.py new file mode 100644 index 0000000..a5e4a00 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/jinja2_debug.py @@ -0,0 +1,506 @@ +from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, JINJA2_SUSPEND +from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK +from pydevd_file_utils import canonical_normalized_path +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode +from _pydev_bundle import pydev_log +from pydevd_plugins.pydevd_line_validation import LineBreakpointWithLazyValidation, ValidationInfo +from _pydev_bundle.pydev_override import overrides +from _pydevd_bundle.pydevd_api import PyDevdAPI + + +class Jinja2LineBreakpoint(LineBreakpointWithLazyValidation): + + def __init__(self, canonical_normalized_filename, breakpoint_id, line, condition, func_name, expression, hit_condition=None, is_logpoint=False): + 
self.canonical_normalized_filename = canonical_normalized_filename + LineBreakpointWithLazyValidation.__init__(self, breakpoint_id, line, condition, func_name, expression, hit_condition=hit_condition, is_logpoint=is_logpoint) + + def __str__(self): + return "Jinja2LineBreakpoint: %s-%d" % (self.canonical_normalized_filename, self.line) + + +class _Jinja2ValidationInfo(ValidationInfo): + + @overrides(ValidationInfo._collect_valid_lines_in_template_uncached) + def _collect_valid_lines_in_template_uncached(self, template): + lineno_mapping = _get_frame_lineno_mapping(template) + if not lineno_mapping: + return set() + + return set(x[0] for x in lineno_mapping) + + +def add_line_breakpoint(plugin, pydb, type, canonical_normalized_filename, breakpoint_id, line, condition, expression, func_name, hit_condition=None, is_logpoint=False, add_breakpoint_result=None, on_changed_breakpoint_state=None): + if type == 'jinja2-line': + jinja2_line_breakpoint = Jinja2LineBreakpoint(canonical_normalized_filename, breakpoint_id, line, condition, func_name, expression, hit_condition=hit_condition, is_logpoint=is_logpoint) + if not hasattr(pydb, 'jinja2_breakpoints'): + _init_plugin_breaks(pydb) + + add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_LAZY_VALIDATION + jinja2_line_breakpoint.add_breakpoint_result = add_breakpoint_result + jinja2_line_breakpoint.on_changed_breakpoint_state = on_changed_breakpoint_state + + return jinja2_line_breakpoint, pydb.jinja2_breakpoints + return None + + +def after_breakpoints_consolidated(plugin, py_db, canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints): + jinja2_breakpoints_for_file = file_to_line_to_breakpoints.get(canonical_normalized_filename) + if not jinja2_breakpoints_for_file: + return + + if not hasattr(py_db, 'jinja2_validation_info'): + _init_plugin_breaks(py_db) + + # In general we validate the breakpoints only when the template is loaded, but if the template + # was already loaded, we can validate the breakpoints based on the last loaded value. 
+ py_db.jinja2_validation_info.verify_breakpoints_from_template_cached_lines( + py_db, canonical_normalized_filename, jinja2_breakpoints_for_file) + + +def add_exception_breakpoint(plugin, pydb, type, exception): + if type == 'jinja2': + if not hasattr(pydb, 'jinja2_exception_break'): + _init_plugin_breaks(pydb) + pydb.jinja2_exception_break[exception] = True + return True + return False + + +def _init_plugin_breaks(pydb): + pydb.jinja2_exception_break = {} + pydb.jinja2_breakpoints = {} + + pydb.jinja2_validation_info = _Jinja2ValidationInfo() + + +def remove_all_exception_breakpoints(plugin, pydb): + if hasattr(pydb, 'jinja2_exception_break'): + pydb.jinja2_exception_break = {} + return True + return False + + +def remove_exception_breakpoint(plugin, pydb, type, exception): + if type == 'jinja2': + try: + del pydb.jinja2_exception_break[exception] + return True + except: + pass + return False + + +def get_breakpoints(plugin, pydb, type): + if type == 'jinja2-line': + return pydb.jinja2_breakpoints + return None + + +def _is_jinja2_render_call(frame): + try: + name = frame.f_code.co_name + if "__jinja_template__" in frame.f_globals and name in ("root", "loop", "macro") or name.startswith("block_"): + return True + return False + except: + pydev_log.exception() + return False + + +def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None): + frame = Jinja2TemplateFrame(frame) + + if frame.f_lineno is None: + return None + + pydb.set_suspend(thread, cmd) + + thread.additional_info.suspend_type = JINJA2_SUSPEND + if cmd == CMD_ADD_EXCEPTION_BREAK: + # send exception name as message + if message: + message = str(message) + thread.additional_info.pydev_message = message + + return frame + + +def _is_jinja2_suspended(thread): + return thread.additional_info.suspend_type == JINJA2_SUSPEND + + +def _is_jinja2_context_call(frame): + return "_Context__obj" in frame.f_locals + + +def _is_jinja2_internal_function(frame): + return 'self' in frame.f_locals and frame.f_locals['self'].__class__.__name__ in \ + ('LoopContext', 'TemplateReference', 'Macro', 'BlockReference') + + +def _find_jinja2_render_frame(frame): + while frame is not None and not _is_jinja2_render_call(frame): + frame = frame.f_back + + return frame + +#======================================================================================================================= +# Jinja2 Frame +#======================================================================================================================= + + +class Jinja2TemplateFrame(object): + + IS_PLUGIN_FRAME = True + + def __init__(self, frame, original_filename=None, template_lineno=None): + + if original_filename is None: + original_filename = _get_jinja2_template_original_filename(frame) + + if template_lineno is None: + template_lineno = _get_jinja2_template_line(frame) + + self.back_context = None + if 'context' in frame.f_locals: + # sometimes we don't have 'context', e.g. 
in macros + self.back_context = frame.f_locals['context'] + self.f_code = FCode('template', original_filename) + self.f_lineno = template_lineno + self.f_back = frame + self.f_globals = {} + self.f_locals = self.collect_context(frame) + self.f_trace = None + + def _get_real_var_name(self, orig_name): + # replace leading number for local variables + parts = orig_name.split('_') + if len(parts) > 1 and parts[0].isdigit(): + return parts[1] + return orig_name + + def collect_context(self, frame): + res = {} + for k, v in frame.f_locals.items(): + if not k.startswith('l_'): + res[k] = v + elif v and not _is_missing(v): + res[self._get_real_var_name(k[2:])] = v + if self.back_context is not None: + for k, v in self.back_context.items(): + res[k] = v + return res + + def _change_variable(self, frame, name, value): + in_vars_or_parents = False + if 'context' in frame.f_locals: + if name in frame.f_locals['context'].parent: + self.back_context.parent[name] = value + in_vars_or_parents = True + if name in frame.f_locals['context'].vars: + self.back_context.vars[name] = value + in_vars_or_parents = True + + l_name = 'l_' + name + if l_name in frame.f_locals: + if in_vars_or_parents: + frame.f_locals[l_name] = self.back_context.resolve(name) + else: + frame.f_locals[l_name] = value + + +class Jinja2TemplateSyntaxErrorFrame(object): + + IS_PLUGIN_FRAME = True + + def __init__(self, frame, exception_cls_name, filename, lineno, f_locals): + self.f_code = FCode('Jinja2 %s' % (exception_cls_name,), filename) + self.f_lineno = lineno + self.f_back = frame + self.f_globals = {} + self.f_locals = f_locals + self.f_trace = None + + +def change_variable(plugin, frame, attr, expression): + if isinstance(frame, Jinja2TemplateFrame): + result = eval(expression, frame.f_globals, frame.f_locals) + frame._change_variable(frame.f_back, attr, result) + return result + return False + + +def _is_missing(item): + if item.__class__.__name__ == 'MissingType': + return True + return False + + +def _find_render_function_frame(frame): + # in order to hide internal rendering functions + old_frame = frame + try: + while not ('self' in frame.f_locals and frame.f_locals['self'].__class__.__name__ == 'Template' and \ + frame.f_code.co_name == 'render'): + frame = frame.f_back + if frame is None: + return old_frame + return frame + except: + return old_frame + + +def _get_jinja2_template_debug_info(frame): + frame_globals = frame.f_globals + + jinja_template = frame_globals.get('__jinja_template__') + + if jinja_template is None: + return None + + return _get_frame_lineno_mapping(jinja_template) + + +def _get_frame_lineno_mapping(jinja_template): + ''' + :rtype: list(tuple(int,int)) + :return: list((original_line, line_in_frame)) + ''' + # _debug_info is a string with the mapping from frame line to actual line + # i.e.: "5=13&8=14" + _debug_info = jinja_template._debug_info + if not _debug_info: + # Sometimes template contains only plain text. 
+ return None + + # debug_info is a list with the mapping from frame line to actual line + # i.e.: [(5, 13), (8, 14)] + return jinja_template.debug_info + + +def _get_jinja2_template_line(frame): + debug_info = _get_jinja2_template_debug_info(frame) + if debug_info is None: + return None + + lineno = frame.f_lineno + + for pair in debug_info: + if pair[1] == lineno: + return pair[0] + + return None + + +def _convert_to_str(s): + return s + + +def _get_jinja2_template_original_filename(frame): + if '__jinja_template__' in frame.f_globals: + return _convert_to_str(frame.f_globals['__jinja_template__'].filename) + + return None + +#======================================================================================================================= +# Jinja2 Step Commands +#======================================================================================================================= + + +def has_exception_breaks(plugin): + if len(plugin.main_debugger.jinja2_exception_break) > 0: + return True + return False + + +def has_line_breaks(plugin): + for _canonical_normalized_filename, breakpoints in plugin.main_debugger.jinja2_breakpoints.items(): + if len(breakpoints) > 0: + return True + return False + + +def can_skip(plugin, pydb, frame): + if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): + filename = _get_jinja2_template_original_filename(frame) + if filename is not None: + canonical_normalized_filename = canonical_normalized_path(filename) + jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(canonical_normalized_filename) + if jinja2_breakpoints_for_file: + return False + + if pydb.jinja2_exception_break: + name = frame.f_code.co_name + + # errors in compile time + if name in ('template', 'top-level template code', '') or name.startswith('block '): + f_back = frame.f_back + module_name = '' + if f_back is not None: + module_name = f_back.f_globals.get('__name__', '') + if module_name.startswith('jinja2.'): + return False + + return True + + +def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + stop_info['jinja2_stop'] = False + if _is_jinja2_suspended(thread): + stop_info['jinja2_stop'] = event in ('call', 'line') and _is_jinja2_render_call(frame) + plugin_stop = stop_info['jinja2_stop'] + stop = False + if info.pydev_call_from_jinja2 is not None: + if _is_jinja2_internal_function(frame): + # if internal Jinja2 function was called, we sould continue debugging inside template + info.pydev_call_from_jinja2 = None + else: + # we go into python code from Jinja2 rendering frame + stop = True + + if event == 'call' and _is_jinja2_context_call(frame.f_back): + # we called function from context, the next step will be in function + info.pydev_call_from_jinja2 = 1 + + if event == 'return' and _is_jinja2_context_call(frame.f_back): + # we return from python code to Jinja2 rendering frame + info.pydev_step_stop = info.pydev_call_from_jinja2 + info.pydev_call_from_jinja2 = None + thread.additional_info.suspend_type = JINJA2_SUSPEND + stop = False + + # print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop_info", stop_info, \ + # "thread.additional_info.suspend_type", thread.additional_info.suspend_type + # print "event", event, "farme.locals", frame.f_locals + return stop, plugin_stop + + +def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + stop_info['jinja2_stop'] = False + if _is_jinja2_suspended(thread): 
+ stop = False + + if info.pydev_call_inside_jinja2 is None: + if _is_jinja2_render_call(frame): + if event == 'call': + info.pydev_call_inside_jinja2 = frame.f_back + if event in ('line', 'return'): + info.pydev_call_inside_jinja2 = frame + else: + if event == 'line': + if _is_jinja2_render_call(frame) and info.pydev_call_inside_jinja2 is frame: + stop_info['jinja2_stop'] = True + plugin_stop = stop_info['jinja2_stop'] + if event == 'return': + if frame is info.pydev_call_inside_jinja2 and 'event' not in frame.f_back.f_locals: + info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame.f_back) + return stop, plugin_stop + else: + if event == 'return' and _is_jinja2_context_call(frame.f_back): + # we return from python code to Jinja2 rendering frame + info.pydev_call_from_jinja2 = None + info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame) + thread.additional_info.suspend_type = JINJA2_SUSPEND + stop = False + return stop, plugin_stop + # print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop", stop, "jinja_stop", jinja2_stop, \ + # "thread.additional_info.suspend_type", thread.additional_info.suspend_type + # print "event", event, "info.pydev_call_inside_jinja2", info.pydev_call_inside_jinja2 + # print "frame", frame, "frame.f_back", frame.f_back, "step_stop", info.pydev_step_stop + # print "is_context_call", _is_jinja2_context_call(frame) + # print "render", _is_jinja2_render_call(frame) + # print "-------------" + return stop, plugin_stop + + +def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd): + pydb = args[0] + thread = args[3] + if 'jinja2_stop' in stop_info and stop_info['jinja2_stop']: + frame = _suspend_jinja2(pydb, thread, frame, step_cmd) + if frame: + pydb.do_wait_suspend(thread, frame, event, arg) + return True + return False + + +def get_breakpoint(plugin, py_db, pydb_frame, frame, event, args): + py_db = args[0] + _filename = args[1] + info = args[2] + break_type = 'jinja2' + + if event == 'line' and info.pydev_state != STATE_SUSPEND and py_db.jinja2_breakpoints and _is_jinja2_render_call(frame): + + jinja_template = frame.f_globals.get('__jinja_template__') + if jinja_template is None: + return False, None, None, break_type + + original_filename = _get_jinja2_template_original_filename(frame) + if original_filename is not None: + pydev_log.debug("Jinja2 is rendering a template: %s", original_filename) + canonical_normalized_filename = canonical_normalized_path(original_filename) + jinja2_breakpoints_for_file = py_db.jinja2_breakpoints.get(canonical_normalized_filename) + + if jinja2_breakpoints_for_file: + + jinja2_validation_info = py_db.jinja2_validation_info + jinja2_validation_info.verify_breakpoints(py_db, canonical_normalized_filename, jinja2_breakpoints_for_file, jinja_template) + + template_lineno = _get_jinja2_template_line(frame) + if template_lineno is not None: + jinja2_breakpoint = jinja2_breakpoints_for_file.get(template_lineno) + if jinja2_breakpoint is not None: + new_frame = Jinja2TemplateFrame(frame, original_filename, template_lineno) + return True, jinja2_breakpoint, new_frame, break_type + + return False, None, None, break_type + + +def suspend(plugin, pydb, thread, frame, bp_type): + if bp_type == 'jinja2': + return _suspend_jinja2(pydb, thread, frame) + return None + + +def exception_break(plugin, pydb, pydb_frame, frame, args, arg): + pydb = args[0] + thread = args[3] + exception, value, trace = arg + if pydb.jinja2_exception_break and exception is not None: + exception_type = 
list(pydb.jinja2_exception_break.keys())[0] + if exception.__name__ in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'): + # errors in rendering + render_frame = _find_jinja2_render_frame(frame) + if render_frame: + suspend_frame = _suspend_jinja2(pydb, thread, render_frame, CMD_ADD_EXCEPTION_BREAK, message=exception_type) + if suspend_frame: + add_exception_to_frame(suspend_frame, (exception, value, trace)) + suspend_frame.f_back = frame + frame = suspend_frame + return True, frame + + elif exception.__name__ in ('TemplateSyntaxError', 'TemplateAssertionError'): + name = frame.f_code.co_name + + # errors in compile time + if name in ('template', 'top-level template code', '') or name.startswith('block '): + + f_back = frame.f_back + if f_back is not None: + module_name = f_back.f_globals.get('__name__', '') + + if module_name.startswith('jinja2.'): + # Jinja2 translates exception info and creates fake frame on his own + pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK) + add_exception_to_frame(frame, (exception, value, trace)) + thread.additional_info.suspend_type = JINJA2_SUSPEND + thread.additional_info.pydev_message = str(exception_type) + return True, frame + return None diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/pydevd_line_validation.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/pydevd_line_validation.py new file mode 100644 index 0000000..2b64a52 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_plugins/pydevd_line_validation.py @@ -0,0 +1,107 @@ +from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint +from _pydevd_bundle.pydevd_api import PyDevdAPI +import bisect +from _pydev_bundle import pydev_log + + +class LineBreakpointWithLazyValidation(LineBreakpoint): + + def __init__(self, *args, **kwargs): + LineBreakpoint.__init__(self, *args, **kwargs) + # This is the _AddBreakpointResult that'll be modified (and then re-sent on the + # on_changed_breakpoint_state). + self.add_breakpoint_result = None + + # The signature for the callback should be: + # on_changed_breakpoint_state(breakpoint_id: int, add_breakpoint_result: _AddBreakpointResult) + self.on_changed_breakpoint_state = None + + # When its state is checked (in which case it'd call on_changed_breakpoint_state if the + # state changed), we store a cache key in 'verified_cache_key' -- in case it changes + # we'd need to re-verify it (for instance, the template could have changed on disk). + self.verified_cache_key = None + + +class ValidationInfo(object): + + def __init__(self): + self._canonical_normalized_filename_to_last_template_lines = {} + + def _collect_valid_lines_in_template(self, template): + # We cache the lines in the template itself. Note that among requests the + # template may be a different instance (because the template contents could be + # changed on disk), but this may still be called multiple times during the + # same render session, so, caching is interesting. 
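+        # (Note: the cache lives on the template object itself, under __pydevd_lines_cache__,
+        #  so a template that is re-loaded from disk is a new object and naturally starts with
+        #  an empty cache; no explicit invalidation is needed.)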
+ lines_cache = getattr(template, '__pydevd_lines_cache__', None) + if lines_cache is not None: + lines, sorted_lines = lines_cache + return lines, sorted_lines + + lines = self._collect_valid_lines_in_template_uncached(template) + + lines = frozenset(lines) + sorted_lines = tuple(sorted(lines)) + template.__pydevd_lines_cache__ = lines, sorted_lines + return lines, sorted_lines + + def _collect_valid_lines_in_template_uncached(self, template): + raise NotImplementedError() + + def verify_breakpoints(self, py_db, canonical_normalized_filename, template_breakpoints_for_file, template): + ''' + This function should be called whenever a rendering is detected. + + :param str canonical_normalized_filename: + :param dict[int:LineBreakpointWithLazyValidation] template_breakpoints_for_file: + ''' + valid_lines_frozenset, sorted_lines = self._collect_valid_lines_in_template(template) + + self._canonical_normalized_filename_to_last_template_lines[canonical_normalized_filename] = valid_lines_frozenset, sorted_lines + self._verify_breakpoints_with_lines_collected(py_db, canonical_normalized_filename, template_breakpoints_for_file, valid_lines_frozenset, sorted_lines) + + def verify_breakpoints_from_template_cached_lines(self, py_db, canonical_normalized_filename, template_breakpoints_for_file): + ''' + This is used when the lines are already available (if just the template is available, + `verify_breakpoints` should be used instead). + ''' + cached = self._canonical_normalized_filename_to_last_template_lines.get(canonical_normalized_filename) + if cached is not None: + valid_lines_frozenset, sorted_lines = cached + self._verify_breakpoints_with_lines_collected(py_db, canonical_normalized_filename, template_breakpoints_for_file, valid_lines_frozenset, sorted_lines) + + def _verify_breakpoints_with_lines_collected(self, py_db, canonical_normalized_filename, template_breakpoints_for_file, valid_lines_frozenset, sorted_lines): + for line, template_bp in list(template_breakpoints_for_file.items()): # Note: iterate in a copy (we may mutate it). + if template_bp.verified_cache_key != valid_lines_frozenset: + template_bp.verified_cache_key = valid_lines_frozenset + valid = line in valid_lines_frozenset + + if not valid: + new_line = -1 + if sorted_lines: + # Adjust to the first preceding valid line. + idx = bisect.bisect_left(sorted_lines, line) + if idx > 0: + new_line = sorted_lines[idx - 1] + + if new_line >= 0 and new_line not in template_breakpoints_for_file: + # We just add it if found and if there's no existing breakpoint at that + # location. + if template_bp.add_breakpoint_result.error_code != PyDevdAPI.ADD_BREAKPOINT_NO_ERROR and template_bp.add_breakpoint_result.translated_line != new_line: + pydev_log.debug('Template breakpoint in %s in line: %s moved to line: %s', canonical_normalized_filename, line, new_line) + template_bp.add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_NO_ERROR + template_bp.add_breakpoint_result.translated_line = new_line + + # Add it to a new line. 
+ template_breakpoints_for_file.pop(line, None) + template_breakpoints_for_file[new_line] = template_bp + template_bp.on_changed_breakpoint_state(template_bp.breakpoint_id, template_bp.add_breakpoint_result) + else: + if template_bp.add_breakpoint_result.error_code != PyDevdAPI.ADD_BREAKPOINT_INVALID_LINE: + pydev_log.debug('Template breakpoint in %s in line: %s invalid (valid lines: %s)', canonical_normalized_filename, line, valid_lines_frozenset) + template_bp.add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_INVALID_LINE + template_bp.on_changed_breakpoint_state(template_bp.breakpoint_id, template_bp.add_breakpoint_result) + else: + if template_bp.add_breakpoint_result.error_code != PyDevdAPI.ADD_BREAKPOINT_NO_ERROR: + template_bp.add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_NO_ERROR + template_bp.on_changed_breakpoint_state(template_bp.breakpoint_id, template_bp.add_breakpoint_result) + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_tracing.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_tracing.py new file mode 100644 index 0000000..5061a53 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_tracing.py @@ -0,0 +1,375 @@ +from _pydevd_bundle.pydevd_constants import get_frame, IS_CPYTHON, IS_64BIT_PROCESS, IS_WINDOWS, \ + IS_LINUX, IS_MAC, DebugInfoHolder, LOAD_NATIVE_LIB_FLAG, \ + ENV_FALSE_LOWER_VALUES, ForkSafeLock +from _pydev_bundle._pydev_saved_modules import thread, threading +from _pydev_bundle import pydev_log, pydev_monkey +import os.path +import platform +import ctypes +from io import StringIO +import sys +import traceback + +_original_settrace = sys.settrace + + +class TracingFunctionHolder: + '''This class exists just to keep some variables (so that we don't keep them in the global namespace). + ''' + _original_tracing = None + _warn = True + _traceback_limit = 1 + _warnings_shown = {} + + +def get_exception_traceback_str(): + exc_info = sys.exc_info() + s = StringIO() + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=s) + return s.getvalue() + + +def _get_stack_str(frame): + + msg = '\nIf this is needed, please check: ' + \ + '\nhttp://pydev.blogspot.com/2007/06/why-cant-pydev-debugger-work-with.html' + \ + '\nto see how to restore the debug tracing back correctly.\n' + + if TracingFunctionHolder._traceback_limit: + s = StringIO() + s.write('Call Location:\n') + traceback.print_stack(f=frame, limit=TracingFunctionHolder._traceback_limit, file=s) + msg = msg + s.getvalue() + + return msg + + +def _internal_set_trace(tracing_func): + if TracingFunctionHolder._warn: + frame = get_frame() + if frame is not None and frame.f_back is not None: + filename = frame.f_back.f_code.co_filename.lower() + if not filename.endswith('threading.py') and not filename.endswith('pydevd_tracing.py'): + + message = \ + '\nPYDEV DEBUGGER WARNING:' + \ + '\nsys.settrace() should not be used when the debugger is being used.' + \ + '\nThis may cause the debugger to stop working correctly.' + \ + '%s' % _get_stack_str(frame.f_back) + + if message not in TracingFunctionHolder._warnings_shown: + # only warn about each message once... 
+ TracingFunctionHolder._warnings_shown[message] = 1 + sys.stderr.write('%s\n' % (message,)) + sys.stderr.flush() + + if TracingFunctionHolder._original_tracing: + TracingFunctionHolder._original_tracing(tracing_func) + + +_last_tracing_func_thread_local = threading.local() + + +def SetTrace(tracing_func): + _last_tracing_func_thread_local.tracing_func = tracing_func + + if tracing_func is not None: + if set_trace_to_threads(tracing_func, thread_idents=[thread.get_ident()], create_dummy_thread=False) == 0: + # If we can use our own tracer instead of the one from sys.settrace, do it (the reason + # is that this is faster than the Python version because we don't call + # PyFrame_FastToLocalsWithError and PyFrame_LocalsToFast at each event! + # (the difference can be huge when checking line events on frames as the + # time increases based on the number of local variables in the scope) + # See: InternalCallTrampoline (on the C side) for details. + return + + # If it didn't work (or if it was None), use the Python version. + set_trace = TracingFunctionHolder._original_tracing or sys.settrace + set_trace(tracing_func) + + +def reapply_settrace(): + try: + tracing_func = _last_tracing_func_thread_local.tracing_func + except AttributeError: + return + else: + SetTrace(tracing_func) + + +def replace_sys_set_trace_func(): + if TracingFunctionHolder._original_tracing is None: + TracingFunctionHolder._original_tracing = sys.settrace + sys.settrace = _internal_set_trace + + +def restore_sys_set_trace_func(): + if TracingFunctionHolder._original_tracing is not None: + sys.settrace = TracingFunctionHolder._original_tracing + TracingFunctionHolder._original_tracing = None + + +_lock = ForkSafeLock() + + +def _load_python_helper_lib(): + try: + # If it's already loaded, just return it. + return _load_python_helper_lib.__lib__ + except AttributeError: + pass + with _lock: + try: + return _load_python_helper_lib.__lib__ + except AttributeError: + pass + + lib = _load_python_helper_lib_uncached() + _load_python_helper_lib.__lib__ = lib + return lib + + +def get_python_helper_lib_filename(): + # Note: we have an independent (and similar -- but not equal) version of this method in + # `add_code_to_python_process.py` which should be kept synchronized with this one (we do a copy + # because the `pydevd_attach_to_process` is mostly independent and shouldn't be imported in the + # debugger -- the only situation where it's imported is if the user actually does an attach to + # process, through `attach_pydevd.py`, but this should usually be called from the IDE directly + # and not from the debugger). + libdir = os.path.join(os.path.dirname(__file__), 'pydevd_attach_to_process') + + arch = '' + if IS_WINDOWS: + # prefer not using platform.machine() when possible (it's a bit heavyweight as it may + # spawn a subprocess). + arch = os.environ.get("PROCESSOR_ARCHITEW6432", os.environ.get('PROCESSOR_ARCHITECTURE', '')) + + if not arch: + arch = platform.machine() + if not arch: + pydev_log.info('platform.machine() did not return valid value.') # This shouldn't happen... 
+ return None + + if IS_WINDOWS: + extension = '.dll' + suffix_64 = 'amd64' + suffix_32 = 'x86' + + elif IS_LINUX: + extension = '.so' + suffix_64 = 'amd64' + suffix_32 = 'x86' + + elif IS_MAC: + extension = '.dylib' + suffix_64 = 'x86_64' + suffix_32 = 'x86' + + else: + pydev_log.info('Unable to set trace to all threads in platform: %s', sys.platform) + return None + + if arch.lower() not in ('amd64', 'x86', 'x86_64', 'i386', 'x86'): + # We don't support this processor by default. Still, let's support the case where the + # user manually compiled it himself with some heuristics. + # + # Ideally the user would provide a library in the format: "attach_." + # based on the way it's currently compiled -- see: + # - windows/compile_windows.bat + # - linux_and_mac/compile_linux.sh + # - linux_and_mac/compile_mac.sh + + try: + found = [name for name in os.listdir(libdir) if name.startswith('attach_') and name.endswith(extension)] + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + # There is no need to show this unless debug tracing is enabled. + pydev_log.exception('Error listing dir: %s', libdir) + return None + + expected_name = 'attach_' + arch + extension + expected_name_linux = 'attach_linux_' + arch + extension + + filename = None + if expected_name in found: # Heuristic: user compiled with "attach_." + filename = os.path.join(libdir, expected_name) + + elif IS_LINUX and expected_name_linux in found: # Heuristic: user compiled with "attach_linux_." + filename = os.path.join(libdir, expected_name_linux) + + elif len(found) == 1: # Heuristic: user removed all libraries and just left his own lib. + filename = os.path.join(libdir, found[0]) + + else: # Heuristic: there's one additional library which doesn't seem to be our own. Find the odd one. + filtered = [name for name in found if not name.endswith((suffix_64 + extension, suffix_32 + extension))] + if len(filtered) == 1: # If more than one is available we can't be sure... + filename = os.path.join(libdir, found[0]) + + if filename is None: + pydev_log.info( + 'Unable to set trace to all threads in arch: %s (did not find a %s lib in %s).', + arch, expected_name, libdir + + ) + return None + + pydev_log.info('Using %s lib in arch: %s.', filename, arch) + + else: + # Happy path for which we have pre-compiled binaries. + if IS_64BIT_PROCESS: + suffix = suffix_64 + else: + suffix = suffix_32 + + if IS_WINDOWS or IS_MAC: # just the extension changes + prefix = 'attach_' + elif IS_LINUX: # + prefix = 'attach_linux_' # historically it has a different name + else: + pydev_log.info('Unable to set trace to all threads in platform: %s', sys.platform) + return None + + filename = os.path.join(libdir, '%s%s%s' % (prefix, suffix, extension)) + + if not os.path.exists(filename): + pydev_log.critical('Expected: %s to exist.', filename) + return None + + return filename + + +def _load_python_helper_lib_uncached(): + if (not IS_CPYTHON or sys.version_info[:2] > (3, 11) + or hasattr(sys, 'gettotalrefcount') or LOAD_NATIVE_LIB_FLAG in ENV_FALSE_LOWER_VALUES): + pydev_log.info('Helper lib to set tracing to all threads not loaded.') + return None + + try: + filename = get_python_helper_lib_filename() + if filename is None: + return None + # Load as pydll so that we don't release the gil. 
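+        # (ctypes.pydll creates a PyDLL instance: unlike ctypes.CDLL, calls made through it
+        #  keep the GIL held, which is required here because AttachDebuggerTracing receives
+        #  py_object arguments and manipulates CPython objects directly.)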
+ lib = ctypes.pydll.LoadLibrary(filename) + pydev_log.info('Successfully Loaded helper lib to set tracing to all threads.') + return lib + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + # Only show message if tracing is on (we don't have pre-compiled + # binaries for all architectures -- i.e.: ARM). + pydev_log.exception('Error loading: %s', filename) + return None + + +def set_trace_to_threads(tracing_func, thread_idents=None, create_dummy_thread=True): + assert tracing_func is not None + + ret = 0 + + # Note: use sys._current_frames() keys to get the thread ids because it'll return + # thread ids created in C/C++ where there's user code running, unlike the APIs + # from the threading module which see only threads created through it (unless + # a call for threading.current_thread() was previously done in that thread, + # in which case a dummy thread would've been created for it). + if thread_idents is None: + thread_idents = set(sys._current_frames().keys()) + + for t in threading.enumerate(): + # PY-44778: ignore pydevd threads and also add any thread that wasn't found on + # sys._current_frames() as some existing threads may not appear in + # sys._current_frames() but may be available through the `threading` module. + if getattr(t, 'pydev_do_not_trace', False): + thread_idents.discard(t.ident) + else: + thread_idents.add(t.ident) + + curr_ident = thread.get_ident() + curr_thread = threading._active.get(curr_ident) + + if curr_ident in thread_idents and len(thread_idents) != 1: + # The current thread must be updated first (because we need to set + # the reference to `curr_thread`). + thread_idents = list(thread_idents) + thread_idents.remove(curr_ident) + thread_idents.insert(0, curr_ident) + + for thread_ident in thread_idents: + # If that thread is not available in the threading module we also need to create a + # dummy thread for it (otherwise it'll be invisible to the debugger). + if create_dummy_thread: + if thread_ident not in threading._active: + + class _DummyThread(threading._DummyThread): + + def _set_ident(self): + # Note: Hack to set the thread ident that we want. + self._ident = thread_ident + + t = _DummyThread() + # Reset to the base class (don't expose our own version of the class). + t.__class__ = threading._DummyThread + + if thread_ident == curr_ident: + curr_thread = t + + with threading._active_limbo_lock: + # On Py2 it'll put in active getting the current indent, not using the + # ident that was set, so, we have to update it (should be harmless on Py3 + # so, do it always). + threading._active[thread_ident] = t + threading._active[curr_ident] = curr_thread + + if t.ident != thread_ident: + # Check if it actually worked. + pydev_log.critical('pydevd: creation of _DummyThread with fixed thread ident did not succeed.') + + # Some (ptvsd) tests failed because of this, so, leave it always disabled for now. + # show_debug_info = 1 if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1 else 0 + show_debug_info = 0 + + # Hack to increase _Py_TracingPossible. + # See comments on py_custom_pyeval_settrace.hpp + proceed = thread.allocate_lock() + proceed.acquire() + + def dummy_trace(frame, event, arg): + return dummy_trace + + def increase_tracing_count(): + set_trace = TracingFunctionHolder._original_tracing or sys.settrace + set_trace(dummy_trace) + proceed.release() + + start_new_thread = pydev_monkey.get_original_start_new_thread(thread) + start_new_thread(increase_tracing_count, ()) + proceed.acquire() # Only proceed after the release() is done. 
+ proceed = None + + # Note: The set_trace_func is not really used anymore in the C side. + set_trace_func = TracingFunctionHolder._original_tracing or sys.settrace + + lib = _load_python_helper_lib() + if lib is None: # This is the case if it's not CPython. + pydev_log.info('Unable to load helper lib to set tracing to all threads (unsupported python vm).') + ret = -1 + else: + try: + result = lib.AttachDebuggerTracing( + ctypes.c_int(show_debug_info), + ctypes.py_object(set_trace_func), + ctypes.py_object(tracing_func), + ctypes.c_uint(thread_ident), + ctypes.py_object(None), + ) + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + # There is no need to show this unless debug tracing is enabled. + pydev_log.exception('Error attaching debugger tracing') + ret = -1 + else: + if result != 0: + pydev_log.info('Unable to set tracing for existing thread. Result: %s', result) + ret = result + + return ret + diff --git a/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/setup_pydevd_cython.py b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/setup_pydevd_cython.py new file mode 100644 index 0000000..5b395dd --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/setup_pydevd_cython.py @@ -0,0 +1,250 @@ +''' +A simpler setup version just to compile the speedup module. + +It should be used as: + +python setup_pydevd_cython build_ext --inplace + +Note: the .c file and other generated files are regenerated from +the .pyx file by running "python build_tools/build.py" +''' + +import os +import sys +from setuptools import setup + +os.chdir(os.path.dirname(os.path.abspath(__file__))) + +IS_PY36_OR_GREATER = sys.version_info > (3, 6) +TODO_PY311 = sys.version_info > (3, 11) + + +def process_args(): + extension_folder = None + target_pydevd_name = None + target_frame_eval = None + force_cython = False + + for i, arg in enumerate(sys.argv[:]): + if arg == '--build-lib': + extension_folder = sys.argv[i + 1] + # It shouldn't be removed from sys.argv (among with --build-temp) because they're passed further to setup() + if arg.startswith('--target-pyd-name='): + sys.argv.remove(arg) + target_pydevd_name = arg[len('--target-pyd-name='):] + if arg.startswith('--target-pyd-frame-eval='): + sys.argv.remove(arg) + target_frame_eval = arg[len('--target-pyd-frame-eval='):] + if arg == '--force-cython': + sys.argv.remove(arg) + force_cython = True + + return extension_folder, target_pydevd_name, target_frame_eval, force_cython + + +def process_template_lines(template_lines): + # Create 2 versions of the template, one for Python 3.8 and another for Python 3.9 + for version in ('38', '39'): + yield '### WARNING: GENERATED CODE, DO NOT EDIT!' + yield '### WARNING: GENERATED CODE, DO NOT EDIT!' + yield '### WARNING: GENERATED CODE, DO NOT EDIT!' + + for line in template_lines: + if version == '38': + line = line.replace('get_bytecode_while_frame_eval(PyFrameObject * frame_obj, int exc)', 'get_bytecode_while_frame_eval_38(PyFrameObject * frame_obj, int exc)') + line = line.replace('CALL_EvalFrameDefault', 'CALL_EvalFrameDefault_38(frame_obj, exc)') + else: # 3.9 + line = line.replace('get_bytecode_while_frame_eval(PyFrameObject * frame_obj, int exc)', 'get_bytecode_while_frame_eval_39(PyThreadState* tstate, PyFrameObject * frame_obj, int exc)') + line = line.replace('CALL_EvalFrameDefault', 'CALL_EvalFrameDefault_39(tstate, frame_obj, exc)') + + yield line + + yield '### WARNING: GENERATED CODE, DO NOT EDIT!' + yield '### WARNING: GENERATED CODE, DO NOT EDIT!' 
+ yield '### WARNING: GENERATED CODE, DO NOT EDIT!' + yield '' + yield '' + + +def process_template_file(contents): + ret = [] + template_lines = [] + + append_to = ret + for line in contents.splitlines(keepends=False): + if line.strip() == '### TEMPLATE_START': + append_to = template_lines + elif line.strip() == '### TEMPLATE_END': + append_to = ret + for line in process_template_lines(template_lines): + ret.append(line) + else: + append_to.append(line) + + return '\n'.join(ret) + + +def build_extension(dir_name, extension_name, target_pydevd_name, force_cython, extended=False, has_pxd=False, template=False): + pyx_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pyx" % (extension_name,)) + + if template: + pyx_template_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.template.pyx" % (extension_name,)) + with open(pyx_template_file, 'r') as stream: + contents = stream.read() + + contents = process_template_file(contents) + + with open(pyx_file, 'w') as stream: + stream.write(contents) + + if target_pydevd_name != extension_name: + # It MUST be there in this case! + # (otherwise we'll have unresolved externals because the .c file had another name initially). + import shutil + + # We must force cython in this case (but only in this case -- for the regular setup in the user machine, we + # should always compile the .c file). + force_cython = True + + new_pyx_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pyx" % (target_pydevd_name,)) + new_c_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.c" % (target_pydevd_name,)) + shutil.copy(pyx_file, new_pyx_file) + pyx_file = new_pyx_file + if has_pxd: + pxd_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pxd" % (extension_name,)) + new_pxd_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pxd" % (target_pydevd_name,)) + shutil.copy(pxd_file, new_pxd_file) + assert os.path.exists(pyx_file) + + try: + c_files = [os.path.join(dir_name, "%s.c" % target_pydevd_name), ] + if force_cython: + for c_file in c_files: + try: + os.remove(c_file) + except: + pass + from Cython.Build import cythonize # @UnusedImport + # Generate the .c files in cythonize (will not compile at this point). + + target = "%s/%s.pyx" % (dir_name, target_pydevd_name,) + cythonize([target]) + + # Workarounds needed in CPython 3.8 and 3.9 to access PyInterpreterState.eval_frame. + for c_file in c_files: + with open(c_file, 'r') as stream: + c_file_contents = stream.read() + + if '#include "internal/pycore_gc.h"' not in c_file_contents: + c_file_contents = c_file_contents.replace('#include "Python.h"', '''#include "Python.h" +#if PY_VERSION_HEX >= 0x03090000 +#include "internal/pycore_gc.h" +#include "internal/pycore_interp.h" +#endif +''') + + if '#include "internal/pycore_pystate.h"' not in c_file_contents: + c_file_contents = c_file_contents.replace('#include "pystate.h"', '''#include "pystate.h" +#if PY_VERSION_HEX >= 0x03080000 +#include "internal/pycore_pystate.h" +#endif +''') + + # We want the same output on Windows and Linux. 
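+                # (i.e. normalize the CRLF line endings and the backslash-separated paths that
+                #  Cython may embed into the generated C, so the committed .c output does not
+                #  depend on the platform it was generated on.)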
+ c_file_contents = c_file_contents.replace('\r\n', '\n').replace('\r', '\n') + c_file_contents = c_file_contents.replace(r'_pydevd_frame_eval\\release_mem.h', '_pydevd_frame_eval/release_mem.h') + c_file_contents = c_file_contents.replace(r'_pydevd_frame_eval\\pydevd_frame_evaluator.pyx', '_pydevd_frame_eval/pydevd_frame_evaluator.pyx') + c_file_contents = c_file_contents.replace(r'_pydevd_bundle\\pydevd_cython.pxd', '_pydevd_bundle/pydevd_cython.pxd') + c_file_contents = c_file_contents.replace(r'_pydevd_bundle\\pydevd_cython.pyx', '_pydevd_bundle/pydevd_cython.pyx') + + with open(c_file, 'w') as stream: + stream.write(c_file_contents) + + # Always compile the .c (and not the .pyx) file (which we should keep up-to-date by running build_tools/build.py). + from distutils.extension import Extension + extra_compile_args = [] + extra_link_args = [] + + if 'linux' in sys.platform: + # Enabling -flto brings executable from 4MB to 0.56MB and -Os to 0.41MB + # Profiling shows an execution around 3-5% slower with -Os vs -O3, + # so, kept only -flto. + extra_compile_args = ["-flto", "-O3"] + extra_link_args = extra_compile_args[:] + + # Note: also experimented with profile-guided optimization. The executable + # size became a bit smaller (from 0.56MB to 0.5MB) but this would add an + # extra step to run the debugger to obtain the optimizations + # so, skipped it for now (note: the actual benchmarks time was in the + # margin of a 0-1% improvement, which is probably not worth it for + # speed increments). + # extra_compile_args = ["-flto", "-fprofile-generate"] + # ... Run benchmarks ... + # extra_compile_args = ["-flto", "-fprofile-use", "-fprofile-correction"] + elif 'win32' in sys.platform: + pass + # uncomment to generate pdbs for visual studio. + # extra_compile_args=["-Zi", "/Od"] + # extra_link_args=["-debug"] + + kwargs = {} + if extra_link_args: + kwargs['extra_link_args'] = extra_link_args + if extra_compile_args: + kwargs['extra_compile_args'] = extra_compile_args + + ext_modules = [ + Extension( + "%s%s.%s" % (dir_name, "_ext" if extended else "", target_pydevd_name,), + c_files, + **kwargs + )] + + # This is needed in CPython 3.8 to be able to include internal/pycore_pystate.h + # (needed to set PyInterpreterState.eval_frame). 
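+        # (The internal/pycore_*.h headers refuse to be included unless Py_BUILD_CORE or
+        #  Py_BUILD_CORE_MODULE is defined, hence the macro added below.)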
+ for module in ext_modules: + module.define_macros = [('Py_BUILD_CORE_MODULE', '1')] + setup( + name='Cythonize', + ext_modules=ext_modules + ) + finally: + if target_pydevd_name != extension_name: + try: + os.remove(new_pyx_file) + except: + import traceback + traceback.print_exc() + try: + os.remove(new_c_file) + except: + import traceback + traceback.print_exc() + if has_pxd: + try: + os.remove(new_pxd_file) + except: + import traceback + traceback.print_exc() + + +extension_folder, target_pydevd_name, target_frame_eval, force_cython = process_args() + +extension_name = "pydevd_cython" +if target_pydevd_name is None: + target_pydevd_name = extension_name +build_extension("_pydevd_bundle", extension_name, target_pydevd_name, force_cython, extension_folder, True) + +if IS_PY36_OR_GREATER and not TODO_PY311: + extension_name = "pydevd_frame_evaluator" + if target_frame_eval is None: + target_frame_eval = extension_name + build_extension("_pydevd_frame_eval", extension_name, target_frame_eval, force_cython, extension_folder, True, template=True) + +if extension_folder: + os.chdir(extension_folder) + for folder in [file for file in os.listdir(extension_folder) if + file != 'build' and os.path.isdir(os.path.join(extension_folder, file))]: + file = os.path.join(folder, "__init__.py") + if not os.path.exists(file): + open(file, 'a').close() diff --git a/venv/lib/python3.8/site-packages/debugpy/_version.py b/venv/lib/python3.8/site-packages/debugpy/_version.py new file mode 100644 index 0000000..4fb2fa4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +{ + "date": "2022-08-15T14:32:56-0700", + "dirty": false, + "error": null, + "full-revisionid": "8b5eeee7e0b1678e701b07d94e6c3cf07b923597", + "version": "1.6.3" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__init__.py b/venv/lib/python3.8/site-packages/debugpy/adapter/__init__.py new file mode 100644 index 0000000..fa55b25 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/adapter/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import annotations +import typing + +if typing.TYPE_CHECKING: + __all__: list[str] + +__all__ = [] + +access_token = None +"""Access token used to authenticate with this adapter.""" diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__main__.py b/venv/lib/python3.8/site-packages/debugpy/adapter/__main__.py new file mode 100644 index 0000000..54d80cb --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/adapter/__main__.py @@ -0,0 +1,224 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import argparse +import atexit +import codecs +import locale +import os +import sys + +# WARNING: debugpy and submodules must not be imported on top level in this module, +# and should be imported locally inside main() instead. 
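+# (Two reasons: the __main__ block at the bottom of this file must fix up sys.path before
+# "import debugpy" can resolve when the adapter is started by path, and main() wants to
+# silence stderr before debugpy's logging machinery is imported and starts writing to it.)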
+ + +def main(args): + # If we're talking DAP over stdio, stderr is not guaranteed to be read from, + # so disable it to avoid the pipe filling and locking up. This must be done + # as early as possible, before the logging module starts writing to it. + if args.port is None: + sys.stderr = stderr = open(os.devnull, "w") + atexit.register(stderr.close) + + from debugpy import adapter + from debugpy.common import json, log, sockets + from debugpy.adapter import clients, servers, sessions + + if args.for_server is not None: + if os.name == "posix": + # On POSIX, we need to leave the process group and its session, and then + # daemonize properly by double-forking (first fork already happened when + # this process was spawned). + os.setsid() + if os.fork() != 0: + sys.exit(0) + + for stdio in sys.stdin, sys.stdout, sys.stderr: + if stdio is not None: + stdio.close() + + if args.log_stderr: + log.stderr.levels |= set(log.LEVELS) + if args.log_dir is not None: + log.log_dir = args.log_dir + + log.to_file(prefix="debugpy.adapter") + log.describe_environment("debugpy.adapter startup environment:") + + servers.access_token = args.server_access_token + if args.for_server is None: + adapter.access_token = codecs.encode(os.urandom(32), "hex").decode("ascii") + + endpoints = {} + try: + client_host, client_port = clients.serve(args.host, args.port) + except Exception as exc: + if args.for_server is None: + raise + endpoints = {"error": "Can't listen for client connections: " + str(exc)} + else: + endpoints["client"] = {"host": client_host, "port": client_port} + + if args.for_server is not None: + try: + server_host, server_port = servers.serve() + except Exception as exc: + endpoints = {"error": "Can't listen for server connections: " + str(exc)} + else: + endpoints["server"] = {"host": server_host, "port": server_port} + + log.info( + "Sending endpoints info to debug server at localhost:{0}:\n{1}", + args.for_server, + json.repr(endpoints), + ) + + try: + sock = sockets.create_client() + try: + sock.settimeout(None) + sock.connect(("127.0.0.1", args.for_server)) + sock_io = sock.makefile("wb", 0) + try: + sock_io.write(json.dumps(endpoints).encode("utf-8")) + finally: + sock_io.close() + finally: + sockets.close_socket(sock) + except Exception: + log.reraise_exception("Error sending endpoints info to debug server:") + + if "error" in endpoints: + log.error("Couldn't set up endpoints; exiting.") + sys.exit(1) + + listener_file = os.getenv("DEBUGPY_ADAPTER_ENDPOINTS") + if listener_file is not None: + log.info( + "Writing endpoints info to {0!r}:\n{1}", listener_file, json.repr(endpoints) + ) + + def delete_listener_file(): + log.info("Listener ports closed; deleting {0!r}", listener_file) + try: + os.remove(listener_file) + except Exception: + log.swallow_exception( + "Failed to delete {0!r}", listener_file, level="warning" + ) + + try: + with open(listener_file, "w") as f: + atexit.register(delete_listener_file) + print(json.dumps(endpoints), file=f) + except Exception: + log.reraise_exception("Error writing endpoints info to file:") + + if args.port is None: + clients.Client("stdio") + + # These must be registered after the one above, to ensure that the listener sockets + # are closed before the endpoint info file is deleted - this way, another process + # can wait for the file to go away as a signal that the ports are no longer in use. 
+ atexit.register(servers.stop_serving) + atexit.register(clients.stop_serving) + + servers.wait_until_disconnected() + log.info("All debug servers disconnected; waiting for remaining sessions...") + + sessions.wait_until_ended() + log.info("All debug sessions have ended; exiting.") + + +def _parse_argv(argv): + parser = argparse.ArgumentParser() + + parser.add_argument( + "--for-server", type=int, metavar="PORT", help=argparse.SUPPRESS + ) + + parser.add_argument( + "--port", + type=int, + default=None, + metavar="PORT", + help="start the adapter in debugServer mode on the specified port", + ) + + parser.add_argument( + "--host", + type=str, + default="127.0.0.1", + metavar="HOST", + help="start the adapter in debugServer mode on the specified host", + ) + + parser.add_argument( + "--access-token", type=str, help="access token expected from the server" + ) + + parser.add_argument( + "--server-access-token", type=str, help="access token expected by the server" + ) + + parser.add_argument( + "--log-dir", + type=str, + metavar="DIR", + help="enable logging and use DIR to save adapter logs", + ) + + parser.add_argument( + "--log-stderr", action="store_true", help="enable logging to stderr" + ) + + args = parser.parse_args(argv[1:]) + + if args.port is None: + if args.log_stderr: + parser.error("--log-stderr requires --port") + if args.for_server is not None: + parser.error("--for-server requires --port") + + return args + + +if __name__ == "__main__": + # debugpy can also be invoked directly rather than via -m. In this case, the first + # entry on sys.path is the one added automatically by Python for the directory + # containing this file. This means that import debugpy will not work, since we need + # the parent directory of debugpy/ to be in sys.path, rather than debugpy/adapter/. + # + # The other issue is that many other absolute imports will break, because they + # will be resolved relative to debugpy/adapter/ - e.g. `import state` will then try + # to import debugpy/adapter/state.py. + # + # To fix both, we need to replace the automatically added entry such that it points + # at parent directory of debugpy/ instead of debugpy/adapter, import debugpy with that + # in sys.path, and then remove the first entry entry altogether, so that it doesn't + # affect any further imports we might do. For example, suppose the user did: + # + # python /foo/bar/debugpy/adapter ... + # + # At the beginning of this script, sys.path will contain "/foo/bar/debugpy/adapter" + # as the first entry. What we want is to replace it with "/foo/bar', then import + # debugpy with that in effect, and then remove the replaced entry before any more + # code runs. The imported debugpy module will remain in sys.modules, and thus all + # future imports of it or its submodules will resolve accordingly. + if "debugpy" not in sys.modules: + # Do not use dirname() to walk up - this can be a relative path, e.g. ".". + sys.path[0] = sys.path[0] + "/../../" + __import__("debugpy") + del sys.path[0] + + # Apply OS-global and user-specific locale settings. + try: + locale.setlocale(locale.LC_ALL, "") + except Exception: + # On POSIX, locale is set via environment variables, and this can fail if + # those variables reference a non-existing locale. Ignore and continue using + # the default "C" locale if so. 
+ pass + + main(_parse_argv(sys.argv)) diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..402f522 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/__main__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..b7fdf96 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/clients.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/clients.cpython-38.pyc new file mode 100644 index 0000000..d19a581 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/clients.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/components.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/components.cpython-38.pyc new file mode 100644 index 0000000..d14118b Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/components.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/launchers.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/launchers.cpython-38.pyc new file mode 100644 index 0000000..193099a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/launchers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/servers.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/servers.cpython-38.pyc new file mode 100644 index 0000000..fd307a9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/servers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/sessions.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/sessions.cpython-38.pyc new file mode 100644 index 0000000..deff839 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/adapter/__pycache__/sessions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/clients.py b/venv/lib/python3.8/site-packages/debugpy/adapter/clients.py new file mode 100644 index 0000000..82985e6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/adapter/clients.py @@ -0,0 +1,703 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import annotations + +import atexit +import os +import sys + +import debugpy +from debugpy import adapter, common, launcher +from debugpy.common import json, log, messaging, sockets +from debugpy.adapter import components, servers, sessions + + +class Client(components.Component): + """Handles the client side of a debug session.""" + + message_handler = components.Component.message_handler + + known_subprocesses: set[servers.Connection] + """Server connections to subprocesses that this client has been made aware of. 
+ """ + + class Capabilities(components.Capabilities): + PROPERTIES = { + "supportsVariableType": False, + "supportsVariablePaging": False, + "supportsRunInTerminalRequest": False, + "supportsMemoryReferences": False, + "supportsArgsCanBeInterpretedByShell": False, + } + + class Expectations(components.Capabilities): + PROPERTIES = { + "locale": "en-US", + "linesStartAt1": True, + "columnsStartAt1": True, + "pathFormat": json.enum("path", optional=True), # we don't support "uri" + } + + def __init__(self, sock): + if sock == "stdio": + log.info("Connecting to client over stdio...", self) + stream = messaging.JsonIOStream.from_stdio() + # Make sure that nothing else tries to interfere with the stdio streams + # that are going to be used for DAP communication from now on. + sys.stdin = stdin = open(os.devnull, "r") + atexit.register(stdin.close) + sys.stdout = stdout = open(os.devnull, "w") + atexit.register(stdout.close) + else: + stream = messaging.JsonIOStream.from_socket(sock) + + with sessions.Session() as session: + super().__init__(session, stream) + + self.client_id = None + """ID of the connecting client. This can be 'test' while running tests.""" + + self.has_started = False + """Whether the "launch" or "attach" request was received from the client, and + fully handled. + """ + + self.start_request = None + """The "launch" or "attach" request as received from the client. + """ + + self._initialize_request = None + """The "initialize" request as received from the client, to propagate to the + server later.""" + + self._deferred_events = [] + """Deferred events from the launcher and the server that must be propagated + only if and when the "launch" or "attach" response is sent. + """ + + self.known_subprocesses = set() + + session.client = self + session.register() + + # For the transition period, send the telemetry events with both old and new + # name. The old one should be removed once the new one lights up. + self.channel.send_event( + "output", + { + "category": "telemetry", + "output": "ptvsd", + "data": {"packageVersion": debugpy.__version__}, + }, + ) + self.channel.send_event( + "output", + { + "category": "telemetry", + "output": "debugpy", + "data": {"packageVersion": debugpy.__version__}, + }, + ) + + def propagate_after_start(self, event): + # pydevd starts sending events as soon as we connect, but the client doesn't + # expect to see any until it receives the response to "launch" or "attach" + # request. If client is not ready yet, save the event instead of propagating + # it immediately. + if self._deferred_events is not None: + self._deferred_events.append(event) + log.debug("Propagation deferred.") + else: + self.client.channel.propagate(event) + + def _propagate_deferred_events(self): + log.debug("Propagating deferred events to {0}...", self.client) + for event in self._deferred_events: + log.debug("Propagating deferred {0}", event.describe()) + self.client.channel.propagate(event) + log.info("All deferred events propagated to {0}.", self.client) + self._deferred_events = None + + # Generic event handler. There are no specific handlers for client events, because + # there are no events from the client in DAP - but we propagate them if we can, in + # case some events appear in future protocol versions. + @message_handler + def event(self, event): + if self.server: + self.server.channel.propagate(event) + + # Generic request handler, used if there's no specific handler below. 
+ @message_handler + def request(self, request): + return self.server.channel.delegate(request) + + @message_handler + def initialize_request(self, request): + if self._initialize_request is not None: + raise request.isnt_valid("Session is already initialized") + + self.client_id = request("clientID", "") + self.capabilities = self.Capabilities(self, request) + self.expectations = self.Expectations(self, request) + self._initialize_request = request + + exception_breakpoint_filters = [ + { + "filter": "raised", + "label": "Raised Exceptions", + "default": False, + "description": "Break whenever any exception is raised.", + }, + { + "filter": "uncaught", + "label": "Uncaught Exceptions", + "default": True, + "description": "Break when the process is exiting due to unhandled exception.", + }, + { + "filter": "userUnhandled", + "label": "User Uncaught Exceptions", + "default": False, + "description": "Break when exception escapes into library code.", + }, + ] + + return { + "supportsCompletionsRequest": True, + "supportsConditionalBreakpoints": True, + "supportsConfigurationDoneRequest": True, + "supportsDebuggerProperties": True, + "supportsDelayedStackTraceLoading": True, + "supportsEvaluateForHovers": True, + "supportsExceptionInfoRequest": True, + "supportsExceptionOptions": True, + "supportsFunctionBreakpoints": True, + "supportsHitConditionalBreakpoints": True, + "supportsLogPoints": True, + "supportsModulesRequest": True, + "supportsSetExpression": True, + "supportsSetVariable": True, + "supportsValueFormattingOptions": True, + "supportsTerminateDebuggee": True, + "supportsGotoTargetsRequest": True, + "supportsClipboardContext": True, + "exceptionBreakpointFilters": exception_breakpoint_filters, + "supportsStepInTargetsRequest": True, + } + + # Common code for "launch" and "attach" request handlers. + # + # See https://github.com/microsoft/vscode/issues/4902#issuecomment-368583522 + # for the sequence of request and events necessary to orchestrate the start. + def _start_message_handler(f): + @components.Component.message_handler + def handle(self, request): + assert request.is_request("launch", "attach") + if self._initialize_request is None: + raise request.isnt_valid("Session is not initialized yet") + if self.launcher or self.server: + raise request.isnt_valid("Session is already started") + + self.session.no_debug = request("noDebug", json.default(False)) + if self.session.no_debug: + servers.dont_wait_for_first_connection() + + self.session.debug_options = debug_options = set( + request("debugOptions", json.array(str)) + ) + + f(self, request) + if request.response is not None: + return + + if self.server: + self.server.initialize(self._initialize_request) + self._initialize_request = None + + arguments = request.arguments + if self.launcher: + redirecting = arguments.get("console") == "internalConsole" + if "RedirectOutput" in debug_options: + # The launcher is doing output redirection, so we don't need the + # server to do it, as well. + arguments = dict(arguments) + arguments["debugOptions"] = list( + debug_options - {"RedirectOutput"} + ) + redirecting = True + + if arguments.get("redirectOutput"): + arguments = dict(arguments) + del arguments["redirectOutput"] + redirecting = True + + arguments["isOutputRedirected"] = redirecting + + # pydevd doesn't send "initialized", and responds to the start request + # immediately, without waiting for "configurationDone". If it changes + # to conform to the DAP spec, we'll need to defer waiting for response. 
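+            # For reference, a spec-conforming debugger would instead send
+            # "initialized", wait for breakpoint configuration requests and
+            # "configurationDone", and only then respond to "launch"/"attach";
+            # with pydevd the request below is expected to complete right away.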
+ try: + self.server.channel.request(request.command, arguments) + except messaging.NoMoreMessages: + # Server closed connection before we could receive the response to + # "attach" or "launch" - this can happen when debuggee exits shortly + # after starting. It's not an error, but we can't do anything useful + # here at this point, either, so just bail out. + request.respond({}) + self.session.finalize( + "{0} disconnected before responding to {1}".format( + self.server, + json.repr(request.command), + ) + ) + return + except messaging.MessageHandlingError as exc: + exc.propagate(request) + + if self.session.no_debug: + self.start_request = request + self.has_started = True + request.respond({}) + self._propagate_deferred_events() + return + + if "clientOS" in request: + client_os = request("clientOS", json.enum("windows", "unix")).upper() + elif {"WindowsClient", "Windows"} & debug_options: + client_os = "WINDOWS" + elif {"UnixClient", "UNIX"} & debug_options: + client_os = "UNIX" + else: + client_os = "WINDOWS" if sys.platform == "win32" else "UNIX" + self.server.channel.request( + "setDebuggerProperty", + { + "skipSuspendOnBreakpointException": ("BaseException",), + "skipPrintBreakpointException": ("NameError",), + "multiThreadsSingleNotification": True, + "ideOS": client_os, + }, + ) + + # Let the client know that it can begin configuring the adapter. + self.channel.send_event("initialized") + + self.start_request = request + return messaging.NO_RESPONSE # will respond on "configurationDone" + + return handle + + @_start_message_handler + def launch_request(self, request): + from debugpy.adapter import launchers + + if self.session.id != 1 or len(servers.connections()): + raise request.cant_handle('"attach" expected') + + debug_options = set(request("debugOptions", json.array(str))) + + # Handling of properties that can also be specified as legacy "debugOptions" flags. + # If property is explicitly set to false, but the flag is in "debugOptions", treat + # it as an error. Returns None if the property wasn't explicitly set either way. + def property_or_debug_option(prop_name, flag_name): + assert prop_name[0].islower() and flag_name[0].isupper() + + value = request(prop_name, bool, optional=True) + if value == (): + value = None + + if flag_name in debug_options: + if value is False: + raise request.isnt_valid( + '{0}:false and "debugOptions":[{1}] are mutually exclusive', + json.repr(prop_name), + json.repr(flag_name), + ) + value = True + + return value + + # "pythonPath" is a deprecated legacy spelling. If "python" is missing, then try + # the alternative. But if both are missing, the error message should say "python". 
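+        # For illustration, a typical "launch" configuration handled here might
+        # look like:
+        #     {"request": "launch", "program": "main.py",
+        #      "python": "/usr/bin/python3", "console": "integratedTerminal"}
+        # "python" may be a single string or a list; it is normalized below into
+        # the list of command-line parts used to start the debuggee.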
+ python_key = "python" + if python_key in request: + if "pythonPath" in request: + raise request.isnt_valid( + '"pythonPath" is not valid if "python" is specified' + ) + elif "pythonPath" in request: + python_key = "pythonPath" + python = request(python_key, json.array(str, vectorize=True, size=(0,))) + if not len(python): + python = [sys.executable] + + python += request("pythonArgs", json.array(str, size=(0,))) + request.arguments["pythonArgs"] = python[1:] + request.arguments["python"] = python + + launcher_python = request("debugLauncherPython", str, optional=True) + if launcher_python == (): + launcher_python = python[0] + + program = module = code = () + if "program" in request: + program = request("program", str) + args = [program] + request.arguments["processName"] = program + if "module" in request: + module = request("module", str) + args = ["-m", module] + request.arguments["processName"] = module + if "code" in request: + code = request("code", json.array(str, vectorize=True, size=(1,))) + args = ["-c", "\n".join(code)] + request.arguments["processName"] = "-c" + + num_targets = len([x for x in (program, module, code) if x != ()]) + if num_targets == 0: + raise request.isnt_valid( + 'either "program", "module", or "code" must be specified' + ) + elif num_targets != 1: + raise request.isnt_valid( + '"program", "module", and "code" are mutually exclusive' + ) + + console = request( + "console", + json.enum( + "internalConsole", + "integratedTerminal", + "externalTerminal", + optional=True, + ), + ) + console_title = request("consoleTitle", json.default("Python Debug Console")) + + # Propagate "args" via CLI so that shell expansion can be applied if requested. + target_args = request("args", json.array(str, vectorize=True)) + args += target_args + + # If "args" was a single string rather than an array, shell expansion must be applied. + shell_expand_args = len(target_args) > 0 and isinstance( + request.arguments["args"], str + ) + if shell_expand_args: + if not self.capabilities["supportsArgsCanBeInterpretedByShell"]: + raise request.isnt_valid( + 'Shell expansion in "args" is not supported by the client' + ) + if console == "internalConsole": + raise request.isnt_valid( + 'Shell expansion in "args" is not available for "console":"internalConsole"' + ) + + cwd = request("cwd", str, optional=True) + if cwd == (): + # If it's not specified, but we're launching a file rather than a module, + # and the specified path has a directory in it, use that. 
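+            # e.g. "program": "/foo/bar/main.py" yields cwd "/foo/bar", while a bare
+            # "program": "main.py" (or a "module"/"code" target) leaves cwd as None.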
+ cwd = None if program == () else (os.path.dirname(program) or None) + + sudo = bool(property_or_debug_option("sudo", "Sudo")) + if sudo and sys.platform == "win32": + raise request.cant_handle('"sudo":true is not supported on Windows.') + + launcher_path = request("debugLauncherPath", os.path.dirname(launcher.__file__)) + adapter_host = request("debugAdapterHost", "127.0.0.1") + + try: + servers.serve(adapter_host) + except Exception as exc: + raise request.cant_handle( + "{0} couldn't create listener socket for servers: {1}", + self.session, + exc, + ) + + launchers.spawn_debuggee( + self.session, + request, + [launcher_python], + launcher_path, + adapter_host, + args, + shell_expand_args, + cwd, + console, + console_title, + sudo, + ) + + @_start_message_handler + def attach_request(self, request): + if self.session.no_debug: + raise request.isnt_valid('"noDebug" is not supported for "attach"') + + host = request("host", str, optional=True) + port = request("port", int, optional=True) + listen = request("listen", dict, optional=True) + connect = request("connect", dict, optional=True) + pid = request("processId", (int, str), optional=True) + sub_pid = request("subProcessId", int, optional=True) + + if host != () or port != (): + if listen != (): + raise request.isnt_valid( + '"listen" and "host"/"port" are mutually exclusive' + ) + if connect != (): + raise request.isnt_valid( + '"connect" and "host"/"port" are mutually exclusive' + ) + if listen != (): + if connect != (): + raise request.isnt_valid( + '"listen" and "connect" are mutually exclusive' + ) + if pid != (): + raise request.isnt_valid( + '"listen" and "processId" are mutually exclusive' + ) + if sub_pid != (): + raise request.isnt_valid( + '"listen" and "subProcessId" are mutually exclusive' + ) + if pid != () and sub_pid != (): + raise request.isnt_valid( + '"processId" and "subProcessId" are mutually exclusive' + ) + + if listen != (): + if servers.is_serving(): + raise request.isnt_valid('Multiple concurrent "listen" sessions are not supported') + host = listen("host", "127.0.0.1") + port = listen("port", int) + adapter.access_token = None + host, port = servers.serve(host, port) + else: + if not servers.is_serving(): + servers.serve() + host, port = servers.listener.getsockname() + + # There are four distinct possibilities here. + # + # If "processId" is specified, this is attach-by-PID. We need to inject the + # debug server into the designated process, and then wait until it connects + # back to us. Since the injected server can crash, there must be a timeout. + # + # If "subProcessId" is specified, this is attach to a known subprocess, likely + # in response to a "debugpyAttach" event. If so, the debug server should be + # connected already, and thus the wait timeout is zero. + # + # If "listen" is specified, this is attach-by-socket with the server expected + # to connect to the adapter via debugpy.connect(). There is no PID known in + # advance, so just wait until the first server connection indefinitely, with + # no timeout. + # + # If "connect" is specified, this is attach-by-socket in which the server has + # spawned the adapter via debugpy.listen(). There is no PID known to the client + # in advance, but the server connection should be either be there already, or + # the server should be connecting shortly, so there must be a timeout. + # + # In the last two cases, if there's more than one server connection already, + # this is a multiprocess re-attach. 
The client doesn't know the PID, so we just + # connect it to the oldest server connection that we have - in most cases, it + # will be the one for the root debuggee process, but if it has exited already, + # it will be some subprocess. + if pid != (): + if not isinstance(pid, int): + try: + pid = int(pid) + except Exception: + raise request.isnt_valid('"processId" must be parseable as int') + debugpy_args = request("debugpyArgs", json.array(str)) + servers.inject(pid, debugpy_args) + timeout = common.PROCESS_SPAWN_TIMEOUT + pred = lambda conn: conn.pid == pid + else: + if sub_pid == (): + pred = lambda conn: True + timeout = common.PROCESS_SPAWN_TIMEOUT if listen == () else None + else: + pred = lambda conn: conn.pid == sub_pid + timeout = 0 + + self.channel.send_event("debugpyWaitingForServer", {"host": host, "port": port}) + conn = servers.wait_for_connection(self.session, pred, timeout) + if conn is None: + if sub_pid != (): + # If we can't find a matching subprocess, it's not always an error - + # it might have already exited, or didn't even get a chance to connect. + # To prevent the client from complaining, pretend that the "attach" + # request was successful, but that the session terminated immediately. + request.respond({}) + self.session.finalize( + 'No known subprocess with "subProcessId":{0}'.format(sub_pid) + ) + return + + raise request.cant_handle( + ( + "Timed out waiting for debug server to connect." + if timeout + else "There is no debug server connected to this adapter." + ), + sub_pid, + ) + + try: + conn.attach_to_session(self.session) + except ValueError: + request.cant_handle("{0} is already being debugged.", conn) + + @message_handler + def configurationDone_request(self, request): + if self.start_request is None or self.has_started: + request.cant_handle( + '"configurationDone" is only allowed during handling of a "launch" ' + 'or an "attach" request' + ) + + try: + self.has_started = True + try: + result = self.server.channel.delegate(request) + except messaging.NoMoreMessages: + # Server closed connection before we could receive the response to + # "configurationDone" - this can happen when debuggee exits shortly + # after starting. It's not an error, but we can't do anything useful + # here at this point, either, so just bail out. + request.respond({}) + self.start_request.respond({}) + self.session.finalize( + "{0} disconnected before responding to {1}".format( + self.server, + json.repr(request.command), + ) + ) + return + else: + request.respond(result) + except messaging.MessageHandlingError as exc: + self.start_request.cant_handle(str(exc)) + finally: + if self.start_request.response is None: + self.start_request.respond({}) + self._propagate_deferred_events() + + # Notify the client of any child processes of the debuggee that aren't already + # being debugged. 
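+        # (Each such subprocess is reported via a "debugpyAttach" event; see
+        # notify_of_subprocess() below.)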
+ for conn in servers.connections(): + if conn.server is None and conn.ppid == self.session.pid: + self.notify_of_subprocess(conn) + + @message_handler + def evaluate_request(self, request): + propagated_request = self.server.channel.propagate(request) + + def handle_response(response): + request.respond(response.body) + + propagated_request.on_response(handle_response) + + return messaging.NO_RESPONSE + + @message_handler + def pause_request(self, request): + request.arguments["threadId"] = "*" + return self.server.channel.delegate(request) + + @message_handler + def continue_request(self, request): + request.arguments["threadId"] = "*" + + try: + return self.server.channel.delegate(request) + except messaging.NoMoreMessages: + # pydevd can sometimes allow the debuggee to exit before the queued + # "continue" response gets sent. Thus, a failed "continue" response + # indicating that the server disconnected should be treated as success. + return {"allThreadsContinued": True} + + @message_handler + def debugpySystemInfo_request(self, request): + result = {"debugpy": {"version": debugpy.__version__}} + if self.server: + try: + pydevd_info = self.server.channel.request("pydevdSystemInfo") + except Exception: + # If the server has already disconnected, or couldn't handle it, + # report what we've got. + pass + else: + result.update(pydevd_info) + return result + + @message_handler + def terminate_request(self, request): + self.session.finalize('client requested "terminate"', terminate_debuggee=True) + return {} + + @message_handler + def disconnect_request(self, request): + terminate_debuggee = request("terminateDebuggee", bool, optional=True) + if terminate_debuggee == (): + terminate_debuggee = None + self.session.finalize('client requested "disconnect"', terminate_debuggee) + return {} + + def notify_of_subprocess(self, conn): + log.info("{1} is a subprocess of {0}.", self, conn) + with self.session: + if self.start_request is None or conn in self.known_subprocesses: + return + if "processId" in self.start_request.arguments: + log.warning( + "Not reporting subprocess for {0}, because the parent process " + 'was attached to using "processId" rather than "port".', + self.session, + ) + return + + log.info("Notifying {0} about {1}.", self, conn) + body = dict(self.start_request.arguments) + self.known_subprocesses.add(conn) + self.session.notify_changed() + + for key in "processId", "listen", "preLaunchTask", "postDebugTask": + body.pop(key, None) + + body["name"] = "Subprocess {0}".format(conn.pid) + body["request"] = "attach" + body["subProcessId"] = conn.pid + + for key in "args", "processName", "pythonArgs": + body.pop(key, None) + + host = body.pop("host", None) + port = body.pop("port", None) + if "connect" not in body: + body["connect"] = {} + if "host" not in body["connect"]: + body["connect"]["host"] = host if host is not None else "127.0.0.1" + if "port" not in body["connect"]: + if port is None: + _, port = listener.getsockname() + body["connect"]["port"] = port + + self.channel.send_event("debugpyAttach", body) + + +def serve(host, port): + global listener + listener = sockets.serve("Client", Client, host, port) + return listener.getsockname() + + +def stop_serving(): + try: + listener.close() + except Exception: + log.swallow_exception(level="warning") diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/components.py b/venv/lib/python3.8/site-packages/debugpy/adapter/components.py new file mode 100644 index 0000000..cc88d14 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/debugpy/adapter/components.py @@ -0,0 +1,183 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import functools + +from debugpy.common import json, log, messaging, util + + +ACCEPT_CONNECTIONS_TIMEOUT = 10 + + +class ComponentNotAvailable(Exception): + def __init__(self, type): + super().__init__(f"{type.__name__} is not available") + + +class Component(util.Observable): + """A component managed by a debug adapter: client, launcher, or debug server. + + Every component belongs to a Session, which is used for synchronization and + shared data. + + Every component has its own message channel, and provides message handlers for + that channel. All handlers should be decorated with @Component.message_handler, + which ensures that Session is locked for the duration of the handler. Thus, only + one handler is running at any given time across all components, unless the lock + is released explicitly or via Session.wait_for(). + + Components report changes to their attributes to Session, allowing one component + to wait_for() a change caused by another component. + """ + + def __init__(self, session, stream=None, channel=None): + assert (stream is None) ^ (channel is None) + + try: + lock_held = session.lock.acquire(blocking=False) + assert lock_held, "__init__ of a Component subclass must lock its Session" + finally: + session.lock.release() + + super().__init__() + + self.session = session + + if channel is None: + stream.name = str(self) + channel = messaging.JsonMessageChannel(stream, self) + channel.start() + else: + channel.name = channel.stream.name = str(self) + channel.handlers = self + self.channel = channel + self.is_connected = True + + # Do this last to avoid triggering useless notifications for assignments above. + self.observers += [lambda *_: self.session.notify_changed()] + + def __str__(self): + return f"{type(self).__name__}[{self.session.id}]" + + @property + def client(self): + return self.session.client + + @property + def launcher(self): + return self.session.launcher + + @property + def server(self): + return self.session.server + + def wait_for(self, *args, **kwargs): + return self.session.wait_for(*args, **kwargs) + + @staticmethod + def message_handler(f): + """Applied to a message handler to automatically lock and unlock the session + for its duration, and to validate the session state. + + If the handler raises ComponentNotAvailable or JsonIOError, converts it to + Message.cant_handle(). + """ + + @functools.wraps(f) + def lock_and_handle(self, message): + try: + with self.session: + return f(self, message) + except ComponentNotAvailable as exc: + raise message.cant_handle("{0}", exc, silent=True) + except messaging.MessageHandlingError as exc: + if exc.cause is message: + raise + else: + exc.propagate(message) + except messaging.JsonIOError as exc: + raise message.cant_handle( + "{0} disconnected unexpectedly", exc.stream.name, silent=True + ) + + return lock_and_handle + + def disconnect(self): + with self.session: + self.is_connected = False + self.session.finalize("{0} has disconnected".format(self)) + + +def missing(session, type): + class Missing(object): + """A dummy component that raises ComponentNotAvailable whenever some + attribute is accessed on it. 
+ """ + + __getattr__ = __setattr__ = lambda self, *_: report() + __bool__ = __nonzero__ = lambda self: False + + def report(): + try: + raise ComponentNotAvailable(type) + except Exception as exc: + log.reraise_exception("{0} in {1}", exc, session) + + return Missing() + + +class Capabilities(dict): + """A collection of feature flags for a component. Corresponds to JSON properties + in the DAP "initialize" request or response, other than those that identify the + party. + """ + + PROPERTIES = {} + """JSON property names and default values for the the capabilities represented + by instances of this class. Keys are names, and values are either default values + or validators. + + If the value is callable, it must be a JSON validator; see debugpy.common.json for + details. If the value is not callable, it is as if json.default(value) validator + was used instead. + """ + + def __init__(self, component, message): + """Parses an "initialize" request or response and extracts the feature flags. + + For every "X" in self.PROPERTIES, sets self["X"] to the corresponding value + from message.payload if it's present there, or to the default value otherwise. + """ + + assert message.is_request("initialize") or message.is_response("initialize") + + self.component = component + + payload = message.payload + for name, validate in self.PROPERTIES.items(): + value = payload.get(name, ()) + if not callable(validate): + validate = json.default(validate) + + try: + value = validate(value) + except Exception as exc: + raise message.isnt_valid("{0} {1}", json.repr(name), exc) + + assert ( + value != () + ), f"{validate} must provide a default value for missing properties." + self[name] = value + + log.debug("{0}", self) + + def __repr__(self): + return f"{type(self).__name__}: {json.repr(dict(self))}" + + def require(self, *keys): + for key in keys: + if not self[key]: + raise messaging.MessageHandlingError( + f"{self.component} does not have capability {json.repr(key)}", + ) diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/launchers.py b/venv/lib/python3.8/site-packages/debugpy/adapter/launchers.py new file mode 100644 index 0000000..444e54d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/adapter/launchers.py @@ -0,0 +1,191 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. 
+ +import os +import subprocess +import sys + +from debugpy import adapter, common +from debugpy.common import log, messaging, sockets +from debugpy.adapter import components, servers + + +class Launcher(components.Component): + """Handles the launcher side of a debug session.""" + + message_handler = components.Component.message_handler + + def __init__(self, session, stream): + with session: + assert not session.launcher + super().__init__(session, stream) + + self.pid = None + """Process ID of the debuggee process, as reported by the launcher.""" + + self.exit_code = None + """Exit code of the debuggee process.""" + + session.launcher = self + + @message_handler + def process_event(self, event): + self.pid = event("systemProcessId", int) + self.client.propagate_after_start(event) + + @message_handler + def output_event(self, event): + self.client.propagate_after_start(event) + + @message_handler + def exited_event(self, event): + self.exit_code = event("exitCode", int) + # We don't want to tell the client about this just yet, because it will then + # want to disconnect, and the launcher might still be waiting for keypress + # (if wait-on-exit was enabled). Instead, we'll report the event when we + # receive "terminated" from the launcher, right before it exits. + + @message_handler + def terminated_event(self, event): + try: + self.client.channel.send_event("exited", {"exitCode": self.exit_code}) + except Exception: + pass + self.channel.close() + + def terminate_debuggee(self): + with self.session: + if self.exit_code is None: + try: + self.channel.request("terminate") + except Exception: + pass + + +def spawn_debuggee( + session, + start_request, + python, + launcher_path, + adapter_host, + args, + shell_expand_args, + cwd, + console, + console_title, + sudo, +): + # -E tells sudo to propagate environment variables to the target process - this + # is necessary for launcher to get DEBUGPY_LAUNCHER_PORT and DEBUGPY_LOG_DIR. + cmdline = ["sudo", "-E"] if sudo else [] + cmdline += python + cmdline += [launcher_path] + env = {} + + arguments = dict(start_request.arguments) + if not session.no_debug: + _, arguments["port"] = servers.listener.getsockname() + arguments["adapterAccessToken"] = adapter.access_token + + def on_launcher_connected(sock): + listener.close() + stream = messaging.JsonIOStream.from_socket(sock) + Launcher(session, stream) + + try: + listener = sockets.serve( + "Launcher", on_launcher_connected, adapter_host, backlog=1 + ) + except Exception as exc: + raise start_request.cant_handle( + "{0} couldn't create listener socket for launcher: {1}", session, exc + ) + + try: + launcher_host, launcher_port = listener.getsockname() + launcher_addr = ( + launcher_port + if launcher_host == "127.0.0.1" + else f"{launcher_host}:{launcher_port}" + ) + cmdline += [str(launcher_addr), "--"] + cmdline += args + + if log.log_dir is not None: + env[str("DEBUGPY_LOG_DIR")] = log.log_dir + if log.stderr.levels != {"warning", "error"}: + env[str("DEBUGPY_LOG_STDERR")] = str(" ".join(log.stderr.levels)) + + if console == "internalConsole": + log.info("{0} spawning launcher: {1!r}", session, cmdline) + try: + # If we are talking to the client over stdio, sys.stdin and sys.stdout + # are redirected to avoid mangling the DAP message stream. Make sure + # the launcher also respects that. 
+ subprocess.Popen( + cmdline, + cwd=cwd, + env=dict(list(os.environ.items()) + list(env.items())), + stdin=sys.stdin, + stdout=sys.stdout, + stderr=sys.stderr, + ) + except Exception as exc: + raise start_request.cant_handle("Failed to spawn launcher: {0}", exc) + else: + log.info('{0} spawning launcher via "runInTerminal" request.', session) + session.client.capabilities.require("supportsRunInTerminalRequest") + kinds = {"integratedTerminal": "integrated", "externalTerminal": "external"} + request_args = { + "kind": kinds[console], + "title": console_title, + "args": cmdline, + "env": env, + } + if cwd is not None: + request_args["cwd"] = cwd + if shell_expand_args: + request_args["argsCanBeInterpretedByShell"] = True + try: + # It is unspecified whether this request receives a response immediately, or only + # after the spawned command has completed running, so do not block waiting for it. + session.client.channel.send_request("runInTerminal", request_args) + except messaging.MessageHandlingError as exc: + exc.propagate(start_request) + + # If using sudo, it might prompt for password, and launcher won't start running + # until the user enters it, so don't apply timeout in that case. + if not session.wait_for( + lambda: session.launcher, + timeout=(None if sudo else common.PROCESS_SPAWN_TIMEOUT), + ): + raise start_request.cant_handle("Timed out waiting for launcher to connect") + + try: + session.launcher.channel.request(start_request.command, arguments) + except messaging.MessageHandlingError as exc: + exc.propagate(start_request) + + if not session.wait_for( + lambda: session.launcher.pid is not None, + timeout=common.PROCESS_SPAWN_TIMEOUT, + ): + raise start_request.cant_handle( + 'Timed out waiting for "process" event from launcher' + ) + + if session.no_debug: + return + + # Wait for the first incoming connection regardless of the PID - it won't + # necessarily match due to the use of stubs like py.exe or "conda run". + conn = servers.wait_for_connection( + session, lambda conn: True, timeout=common.PROCESS_SPAWN_TIMEOUT + ) + if conn is None: + raise start_request.cant_handle("Timed out waiting for debuggee to spawn") + conn.attach_to_session(session) + + finally: + listener.close() diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/servers.py b/venv/lib/python3.8/site-packages/debugpy/adapter/servers.py new file mode 100644 index 0000000..b860558 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/adapter/servers.py @@ -0,0 +1,563 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import annotations + +import os +import subprocess +import sys +import threading +import time + +import debugpy +from debugpy import adapter +from debugpy.common import json, log, messaging, sockets +from debugpy.adapter import components + + +access_token = None +"""Access token used to authenticate with the servers.""" + +listener = None +"""Listener socket that accepts server connections.""" + +_lock = threading.RLock() + +_connections = [] +"""All servers that are connected to this adapter, in order in which they connected. +""" + +_connections_changed = threading.Event() + + +class Connection(object): + """A debug server that is connected to the adapter. + + Servers that are not participating in a debug session are managed directly by the + corresponding Connection instance. 
+ + Servers that are participating in a debug session are managed by that sessions's + Server component instance, but Connection object remains, and takes over again + once the session ends. + """ + + disconnected: bool + + process_replaced: bool + """Whether this is a connection to a process that is being replaced in situ + by another process, e.g. via exec(). + """ + + server: Server | None + """The Server component, if this debug server belongs to Session. + """ + + pid: int | None + + ppid: int | None + + channel: messaging.JsonMessageChannel + + def __init__(self, sock): + from debugpy.adapter import sessions + + self.disconnected = False + + self.process_replaced = False + + self.server = None + + self.pid = None + + stream = messaging.JsonIOStream.from_socket(sock, str(self)) + self.channel = messaging.JsonMessageChannel(stream, self) + self.channel.start() + + try: + self.authenticate() + info = self.channel.request("pydevdSystemInfo") + process_info = info("process", json.object()) + self.pid = process_info("pid", int) + self.ppid = process_info("ppid", int, optional=True) + if self.ppid == (): + self.ppid = None + self.channel.name = stream.name = str(self) + + debugpy_dir = os.path.dirname(os.path.dirname(debugpy.__file__)) + # Note: we must check if 'debugpy' is not already in sys.modules because the + # evaluation of an import at the wrong time could deadlock Python due to + # its import lock. + # + # So, in general this evaluation shouldn't do anything. It's only + # important when pydevd attaches automatically to a subprocess. In this + # case, we have to make sure that debugpy is properly put back in the game + # for users to be able to use it.v + # + # In this case (when the import is needed), this evaluation *must* be done + # before the configurationDone request is sent -- if this is not respected + # it's possible that pydevd already started secondary threads to handle + # commands, in which case it's very likely that this command would be + # evaluated at the wrong thread and the import could potentially deadlock + # the program. + # + # Note 2: the sys module is guaranteed to be in the frame globals and + # doesn't need to be imported. + inject_debugpy = """ +if 'debugpy' not in sys.modules: + sys.path.insert(0, {debugpy_dir!r}) + try: + import debugpy + finally: + del sys.path[0] +""" + inject_debugpy = inject_debugpy.format(debugpy_dir=debugpy_dir) + + try: + self.channel.request("evaluate", {"expression": inject_debugpy}) + except messaging.MessageHandlingError: + # Failure to inject is not a fatal error - such a subprocess can + # still be debugged, it just won't support "import debugpy" in user + # code - so don't terminate the session. + log.swallow_exception( + "Failed to inject debugpy into {0}:", self, level="warning" + ) + + with _lock: + # The server can disconnect concurrently before we get here, e.g. if + # it was force-killed. If the disconnect() handler has already run, + # don't register this server or report it, since there's nothing to + # deregister it. + if self.disconnected: + return + + # An existing connection with the same PID and process_replaced == True + # corresponds to the process that replaced itself with this one, so it's + # not an error. 
+ if any( + conn.pid == self.pid and not conn.process_replaced + for conn in _connections + ): + raise KeyError(f"{self} is already connected to this adapter") + + is_first_server = len(_connections) == 0 + _connections.append(self) + _connections_changed.set() + + except Exception: + log.swallow_exception("Failed to accept incoming server connection:") + self.channel.close() + + # If this was the first server to connect, and the main thread is inside + # wait_until_disconnected(), we want to unblock it and allow it to exit. + dont_wait_for_first_connection() + + # If we couldn't retrieve all the necessary info from the debug server, + # or there's a PID clash, we don't want to track this debuggee anymore, + # but we want to continue accepting connections. + return + + parent_session = sessions.get(self.ppid) + if parent_session is None: + parent_session = sessions.get(self.pid) + if parent_session is None: + log.info("No active debug session for parent process of {0}.", self) + else: + if self.pid == parent_session.pid: + parent_server = parent_session.server + if not (parent_server and parent_server.connection.process_replaced): + log.error("{0} is not expecting replacement.", parent_session) + self.channel.close() + return + try: + parent_session.client.notify_of_subprocess(self) + return + except Exception: + # This might fail if the client concurrently disconnects from the parent + # session. We still want to keep the connection around, in case the + # client reconnects later. If the parent session was "launch", it'll take + # care of closing the remaining server connections. + log.swallow_exception( + "Failed to notify parent session about {0}:", self + ) + + # If we got to this point, the subprocess notification was either not sent, + # or not delivered successfully. For the first server, this is expected, since + # it corresponds to the root process, and there is no other debug session to + # notify. But subsequent server connections represent subprocesses, and those + # will not start running user code until the client tells them to. Since there + # isn't going to be a client without the notification, such subprocesses have + # to be unblocked. + if is_first_server: + return + log.info("No clients to wait for - unblocking {0}.", self) + try: + self.channel.request("initialize", {"adapterID": "debugpy"}) + self.channel.request("attach", {"subProcessId": self.pid}) + self.channel.request("configurationDone") + self.channel.request("disconnect") + except Exception: + log.swallow_exception("Failed to unblock orphaned subprocess:") + self.channel.close() + + def __str__(self): + return "Server" + ("[?]" if self.pid is None else f"[pid={self.pid}]") + + def authenticate(self): + if access_token is None and adapter.access_token is None: + return + auth = self.channel.request( + "pydevdAuthorize", {"debugServerAccessToken": access_token} + ) + if auth["clientAccessToken"] != adapter.access_token: + self.channel.close() + raise RuntimeError('Mismatched "clientAccessToken"; server not authorized.') + + def request(self, request): + raise request.isnt_valid( + "Requests from the debug server to the client are not allowed." + ) + + def event(self, event): + pass + + def terminated_event(self, event): + self.channel.close() + + def disconnect(self): + with _lock: + self.disconnected = True + if self.server is not None: + # If the disconnect happened while Server was being instantiated, + # we need to tell it, so that it can clean up via Session.finalize(). 
+ # It will also take care of deregistering the connection in that case. + self.server.disconnect() + elif self in _connections: + _connections.remove(self) + _connections_changed.set() + + def attach_to_session(self, session): + """Attaches this server to the specified Session as a Server component. + + Raises ValueError if the server already belongs to some session. + """ + + with _lock: + if self.server is not None: + raise ValueError + log.info("Attaching {0} to {1}", self, session) + self.server = Server(session, self) + + +class Server(components.Component): + """Handles the debug server side of a debug session.""" + + message_handler = components.Component.message_handler + + connection: Connection + + class Capabilities(components.Capabilities): + PROPERTIES = { + "supportsCompletionsRequest": False, + "supportsConditionalBreakpoints": False, + "supportsConfigurationDoneRequest": False, + "supportsDataBreakpoints": False, + "supportsDelayedStackTraceLoading": False, + "supportsDisassembleRequest": False, + "supportsEvaluateForHovers": False, + "supportsExceptionInfoRequest": False, + "supportsExceptionOptions": False, + "supportsFunctionBreakpoints": False, + "supportsGotoTargetsRequest": False, + "supportsHitConditionalBreakpoints": False, + "supportsLoadedSourcesRequest": False, + "supportsLogPoints": False, + "supportsModulesRequest": False, + "supportsReadMemoryRequest": False, + "supportsRestartFrame": False, + "supportsRestartRequest": False, + "supportsSetExpression": False, + "supportsSetVariable": False, + "supportsStepBack": False, + "supportsStepInTargetsRequest": False, + "supportsTerminateDebuggee": False, + "supportsTerminateRequest": False, + "supportsTerminateThreadsRequest": False, + "supportsValueFormattingOptions": False, + "exceptionBreakpointFilters": [], + "additionalModuleColumns": [], + "supportedChecksumAlgorithms": [], + } + + def __init__(self, session, connection): + assert connection.server is None + with session: + assert not session.server + super().__init__(session, channel=connection.channel) + + self.connection = connection + + assert self.session.pid is None + if self.session.launcher and self.session.launcher.pid != self.pid: + log.info( + "Launcher reported PID={0}, but server reported PID={1}", + self.session.launcher.pid, + self.pid, + ) + self.session.pid = self.pid + + session.server = self + + @property + def pid(self): + """Process ID of the debuggee process, as reported by the server.""" + return self.connection.pid + + @property + def ppid(self): + """Parent process ID of the debuggee process, as reported by the server.""" + return self.connection.ppid + + def initialize(self, request): + assert request.is_request("initialize") + self.connection.authenticate() + request = self.channel.propagate(request) + request.wait_for_response() + self.capabilities = self.Capabilities(self, request.response) + + # Generic request handler, used if there's no specific handler below. + @message_handler + def request(self, request): + # Do not delegate requests from the server by default. There is a security + # boundary between the server and the adapter, and we cannot trust arbitrary + # requests sent over that boundary, since they may contain arbitrary code + # that the client will execute - e.g. "runInTerminal". The adapter must only + # propagate requests that it knows are safe. + raise request.isnt_valid( + "Requests from the debug server to the client are not allowed." + ) + + # Generic event handler, used if there's no specific handler below. 
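+    # For example, "stopped", "thread", "module" and "output" events from pydevd
+    # have no dedicated handler below, so they flow through here and are forwarded
+    # to the client once the start request has been fully handled.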
+ @message_handler + def event(self, event): + self.client.propagate_after_start(event) + + @message_handler + def initialized_event(self, event): + # pydevd doesn't send it, but the adapter will send its own in any case. + pass + + @message_handler + def process_event(self, event): + # If there is a launcher, it's handling the process event. + if not self.launcher: + self.client.propagate_after_start(event) + + @message_handler + def continued_event(self, event): + # https://github.com/microsoft/ptvsd/issues/1530 + # + # DAP specification says that a step request implies that only the thread on + # which that step occurred is resumed for the duration of the step. However, + # for VS compatibility, pydevd can operate in a mode that resumes all threads + # instead. This is set according to the value of "steppingResumesAllThreads" + # in "launch" or "attach" request, which defaults to true. If explicitly set + # to false, pydevd will only resume the thread that was stepping. + # + # To ensure that the client is aware that other threads are getting resumed in + # that mode, pydevd sends a "continued" event with "allThreadsResumed": true. + # when responding to a step request. This ensures correct behavior in VSCode + # and other DAP-conformant clients. + # + # On the other hand, VS does not follow the DAP specification in this regard. + # When it requests a step, it assumes that all threads will be resumed, and + # does not expect to see "continued" events explicitly reflecting that fact. + # If such events are sent regardless, VS behaves erratically. Thus, we have + # to suppress them specifically for VS. + if self.client.client_id not in ("visualstudio", "vsformac"): + self.client.propagate_after_start(event) + + @message_handler + def exited_event(self, event: messaging.Event): + if event("pydevdReason", str, optional=True) == "processReplaced": + # The parent process used some API like exec() that replaced it with another + # process in situ. The connection will shut down immediately afterwards, but + # we need to keep the corresponding session alive long enough to report the + # subprocess to it. + self.connection.process_replaced = True + else: + # If there is a launcher, it's handling the exit code. + if not self.launcher: + self.client.propagate_after_start(event) + + @message_handler + def terminated_event(self, event): + # Do not propagate this, since we'll report our own. + self.channel.close() + + def detach_from_session(self): + with _lock: + self.is_connected = False + self.channel.handlers = self.connection + self.channel.name = self.channel.stream.name = str(self.connection) + self.connection.server = None + + def disconnect(self): + if self.connection.process_replaced: + # Wait for the replacement server to connect to the adapter, and to report + # itself to the client for this session if there is one. 
+ log.info("{0} is waiting for replacement subprocess.", self) + session = self.session + if not session.client or not session.client.is_connected: + wait_for_connection( + session, lambda conn: conn.pid == self.pid, timeout=30 + ) + else: + self.wait_for( + lambda: ( + not session.client + or not session.client.is_connected + or any( + conn.pid == self.pid + for conn in session.client.known_subprocesses + ) + ), + timeout=30, + ) + with _lock: + _connections.remove(self.connection) + _connections_changed.set() + super().disconnect() + + +def serve(host="127.0.0.1", port=0): + global listener + listener = sockets.serve("Server", Connection, host, port) + return listener.getsockname() + + +def is_serving(): + return listener is not None + + +def stop_serving(): + global listener + try: + if listener is not None: + listener.close() + listener = None + except Exception: + log.swallow_exception(level="warning") + + +def connections(): + with _lock: + return list(_connections) + + +def wait_for_connection(session, predicate, timeout=None): + """Waits until there is a server matching the specified predicate connected to + this adapter, and returns the corresponding Connection. + + If there is more than one server connection already available, returns the oldest + one. + """ + + def wait_for_timeout(): + time.sleep(timeout) + wait_for_timeout.timed_out = True + with _lock: + _connections_changed.set() + + wait_for_timeout.timed_out = timeout == 0 + if timeout: + thread = threading.Thread( + target=wait_for_timeout, name="servers.wait_for_connection() timeout" + ) + thread.daemon = True + thread.start() + + if timeout != 0: + log.info("{0} waiting for connection from debug server...", session) + while True: + with _lock: + _connections_changed.clear() + conns = (conn for conn in _connections if predicate(conn)) + conn = next(conns, None) + if conn is not None or wait_for_timeout.timed_out: + return conn + _connections_changed.wait() + + +def wait_until_disconnected(): + """Blocks until all debug servers disconnect from the adapter. + + If there are no server connections, waits until at least one is established first, + before waiting for it to disconnect. + """ + while True: + _connections_changed.wait() + with _lock: + _connections_changed.clear() + if not len(_connections): + return + + +def dont_wait_for_first_connection(): + """Unblocks any pending wait_until_disconnected() call that is waiting on the + first server to connect. + """ + with _lock: + _connections_changed.set() + + +def inject(pid, debugpy_args): + host, port = listener.getsockname() + + cmdline = [ + sys.executable, + os.path.dirname(debugpy.__file__), + "--connect", + host + ":" + str(port), + ] + if adapter.access_token is not None: + cmdline += ["--adapter-access-token", adapter.access_token] + cmdline += debugpy_args + cmdline += ["--pid", str(pid)] + + log.info("Spawning attach-to-PID debugger injector: {0!r}", cmdline) + try: + injector = subprocess.Popen( + cmdline, + bufsize=0, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + except Exception as exc: + log.swallow_exception( + "Failed to inject debug server into process with PID={0}", pid + ) + raise messaging.MessageHandlingError( + "Failed to inject debug server into process with PID={0}: {1}".format( + pid, exc + ) + ) + + # We need to capture the output of the injector - otherwise it can get blocked + # on a write() syscall when it tries to print something. 
+ + def capture_output(): + while True: + line = injector.stdout.readline() + if not line: + break + log.info("Injector[PID={0}] output:\n{1}", pid, line.rstrip()) + log.info("Injector[PID={0}] exited.", pid) + + thread = threading.Thread( + target=capture_output, + name=f"Injector[PID={pid}] output", + ) + thread.daemon = True + thread.start() diff --git a/venv/lib/python3.8/site-packages/debugpy/adapter/sessions.py b/venv/lib/python3.8/site-packages/debugpy/adapter/sessions.py new file mode 100644 index 0000000..ca6c574 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/adapter/sessions.py @@ -0,0 +1,281 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import itertools +import os +import signal +import threading +import time + +from debugpy import common +from debugpy.common import log, util +from debugpy.adapter import components, launchers, servers + + +_lock = threading.RLock() +_sessions = set() +_sessions_changed = threading.Event() + + +class Session(util.Observable): + """A debug session involving a client, an adapter, a launcher, and a debug server. + + The client and the adapter are always present, and at least one of launcher and debug + server is present, depending on the scenario. + """ + + _counter = itertools.count(1) + + def __init__(self): + from debugpy.adapter import clients + + super().__init__() + + self.lock = threading.RLock() + self.id = next(self._counter) + self._changed_condition = threading.Condition(self.lock) + + self.client = components.missing(self, clients.Client) + """The client component. Always present.""" + + self.launcher = components.missing(self, launchers.Launcher) + """The launcher componet. Always present in "launch" sessions, and never + present in "attach" sessions. + """ + + self.server = components.missing(self, servers.Server) + """The debug server component. Always present, unless this is a "launch" + session with "noDebug". + """ + + self.no_debug = None + """Whether this is a "noDebug" session.""" + + self.pid = None + """Process ID of the debuggee process.""" + + self.debug_options = {} + """Debug options as specified by "launch" or "attach" request.""" + + self.is_finalizing = False + """Whether finalize() has been invoked.""" + + self.observers += [lambda *_: self.notify_changed()] + + def __str__(self): + return f"Session[{self.id}]" + + def __enter__(self): + """Lock the session for exclusive access.""" + self.lock.acquire() + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + """Unlock the session.""" + self.lock.release() + + def register(self): + with _lock: + _sessions.add(self) + _sessions_changed.set() + + def notify_changed(self): + with self: + self._changed_condition.notify_all() + + # A session is considered ended once all components disconnect, and there + # are no further incoming messages from anything to handle. + components = self.client, self.launcher, self.server + if all(not com or not com.is_connected for com in components): + with _lock: + if self in _sessions: + log.info("{0} has ended.", self) + _sessions.remove(self) + _sessions_changed.set() + + def wait_for(self, predicate, timeout=None): + """Waits until predicate() becomes true. + + The predicate is invoked with the session locked. If satisfied, the method + returns immediately. 
Otherwise, the lock is released (even if it was held + at entry), and the method blocks waiting for some attribute of either self, + self.client, self.server, or self.launcher to change. On every change, session + is re-locked and predicate is re-evaluated, until it is satisfied. + + While the session is unlocked, message handlers for components other than + the one that is waiting can run, but message handlers for that one are still + blocked. + + If timeout is not None, the method will unblock and return after that many + seconds regardless of whether the predicate was satisfied. The method returns + False if it timed out, and True otherwise. + """ + + def wait_for_timeout(): + time.sleep(timeout) + wait_for_timeout.timed_out = True + self.notify_changed() + + wait_for_timeout.timed_out = False + if timeout is not None: + thread = threading.Thread( + target=wait_for_timeout, name="Session.wait_for() timeout" + ) + thread.daemon = True + thread.start() + + with self: + while not predicate(): + if wait_for_timeout.timed_out: + return False + self._changed_condition.wait() + return True + + def finalize(self, why, terminate_debuggee=None): + """Finalizes the debug session. + + If the server is present, sends "disconnect" request with "terminateDebuggee" + set as specified request to it; waits for it to disconnect, allowing any + remaining messages from it to be handled; and closes the server channel. + + If the launcher is present, sends "terminate" request to it, regardless of the + value of terminate; waits for it to disconnect, allowing any remaining messages + from it to be handled; and closes the launcher channel. + + If the client is present, sends "terminated" event to it. + + If terminate_debuggee=None, it is treated as True if the session has a Launcher + component, and False otherwise. + """ + + if self.is_finalizing: + return + self.is_finalizing = True + log.info("{0}; finalizing {1}.", why, self) + + if terminate_debuggee is None: + terminate_debuggee = bool(self.launcher) + + try: + self._finalize(why, terminate_debuggee) + except Exception: + # Finalization should never fail, and if it does, the session is in an + # indeterminate and likely unrecoverable state, so just fail fast. + log.swallow_exception("Fatal error while finalizing {0}", self) + os._exit(1) + + log.info("{0} finalized.", self) + + def _finalize(self, why, terminate_debuggee): + # If the client started a session, and then disconnected before issuing "launch" + # or "attach", the main thread will be blocked waiting for the first server + # connection to come in - unblock it, so that we can exit. + servers.dont_wait_for_first_connection() + + if self.server: + if self.server.is_connected: + if terminate_debuggee and self.launcher and self.launcher.is_connected: + # If we were specifically asked to terminate the debuggee, and we + # can ask the launcher to kill it, do so instead of disconnecting + # from the server to prevent debuggee from running any more code. + self.launcher.terminate_debuggee() + else: + # Otherwise, let the server handle it the best it can. + try: + self.server.channel.request( + "disconnect", {"terminateDebuggee": terminate_debuggee} + ) + except Exception: + pass + self.server.detach_from_session() + + if self.launcher and self.launcher.is_connected: + # If there was a server, we just disconnected from it above, which should + # cause the debuggee process to exit, unless it is being replaced in situ - + # so let's wait for that first. 
+ if self.server and not self.server.connection.process_replaced: + log.info('{0} waiting for "exited" event...', self) + if not self.wait_for( + lambda: self.launcher.exit_code is not None, + timeout=common.PROCESS_EXIT_TIMEOUT, + ): + log.warning('{0} timed out waiting for "exited" event.', self) + + # Terminate the debuggee process if it's still alive for any reason - + # whether it's because there was no server to handle graceful shutdown, + # or because the server couldn't handle it for some reason - unless the + # process is being replaced in situ. + if not (self.server and self.server.connection.process_replaced): + self.launcher.terminate_debuggee() + + # Wait until the launcher message queue fully drains. There is no timeout + # here, because the final "terminated" event will only come after reading + # user input in wait-on-exit scenarios. In addition, if the process was + # replaced in situ, the launcher might still have more output to capture + # from its replacement. + log.info("{0} waiting for {1} to disconnect...", self, self.launcher) + self.wait_for(lambda: not self.launcher.is_connected) + + try: + self.launcher.channel.close() + except Exception: + log.swallow_exception() + + if self.client: + if self.client.is_connected: + # Tell the client that debugging is over, but don't close the channel until it + # tells us to, via the "disconnect" request. + try: + self.client.channel.send_event("terminated") + except Exception: + pass + + if ( + self.client.start_request is not None + and self.client.start_request.command == "launch" + and not (self.server and self.server.connection.process_replaced) + ): + servers.stop_serving() + log.info( + '"launch" session ended - killing remaining debuggee processes.' + ) + + pids_killed = set() + if self.launcher and self.launcher.pid is not None: + # Already killed above. + pids_killed.add(self.launcher.pid) + + while True: + conns = [ + conn + for conn in servers.connections() + if conn.pid not in pids_killed + ] + if not len(conns): + break + for conn in conns: + log.info("Killing {0}", conn) + try: + os.kill(conn.pid, signal.SIGTERM) + except Exception: + log.swallow_exception("Failed to kill {0}", conn) + pids_killed.add(conn.pid) + + +def get(pid): + with _lock: + return next((session for session in _sessions if session.pid == pid), None) + + +def wait_until_ended(): + """Blocks until all sessions have ended. + + A session ends when all components that it manages disconnect from it. + """ + while True: + with _lock: + if not len(_sessions): + return + _sessions_changed.clear() + _sessions_changed.wait() diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__init__.py b/venv/lib/python3.8/site-packages/debugpy/common/__init__.py new file mode 100644 index 0000000..3dde363 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import annotations +import os +import typing + +if typing.TYPE_CHECKING: + __all__: list[str] + +__all__ = [] + +# The lower time bound for assuming that the process hasn't spawned successfully. +PROCESS_SPAWN_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_SPAWN_TIMEOUT", 15)) + +# The lower time bound for assuming that the process hasn't exited gracefully. 
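+# (Both this and the spawn timeout above are read from the environment, so they can
+# be raised on slow machines, e.g. an illustrative invocation:
+#     DEBUGPY_PROCESS_SPAWN_TIMEOUT=30 DEBUGPY_PROCESS_EXIT_TIMEOUT=10 python -m debugpy ...)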
+PROCESS_EXIT_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_EXIT_TIMEOUT", 5)) diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..1efd686 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/json.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/json.cpython-38.pyc new file mode 100644 index 0000000..18305eb Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/json.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/log.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/log.cpython-38.pyc new file mode 100644 index 0000000..6b082f2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/log.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/messaging.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/messaging.cpython-38.pyc new file mode 100644 index 0000000..7540135 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/messaging.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/singleton.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/singleton.cpython-38.pyc new file mode 100644 index 0000000..e4847e0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/singleton.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/sockets.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/sockets.cpython-38.pyc new file mode 100644 index 0000000..56c53f8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/sockets.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/stacks.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/stacks.cpython-38.pyc new file mode 100644 index 0000000..0e1d84a Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/stacks.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/timestamp.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/timestamp.cpython-38.pyc new file mode 100644 index 0000000..b92d0fc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/timestamp.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/util.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000..81ab315 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/common/__pycache__/util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/common/json.py b/venv/lib/python3.8/site-packages/debugpy/common/json.py new file mode 100644 index 0000000..6f3e2b2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/json.py @@ -0,0 +1,292 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See LICENSE in the project root +# for license information. + +"""Improved JSON serialization. +""" + +import builtins +import json +import numbers +import operator + + +JsonDecoder = json.JSONDecoder + + +class JsonEncoder(json.JSONEncoder): + """Customizable JSON encoder. + + If the object implements __getstate__, then that method is invoked, and its + result is serialized instead of the object itself. + """ + + def default(self, value): + try: + get_state = value.__getstate__ + except AttributeError: + pass + else: + return get_state() + return super().default(value) + + +class JsonObject(object): + """A wrapped Python object that formats itself as JSON when asked for a string + representation via str() or format(). + """ + + json_encoder_factory = JsonEncoder + """Used by __format__ when format_spec is not empty.""" + + json_encoder = json_encoder_factory(indent=4) + """The default encoder used by __format__ when format_spec is empty.""" + + def __init__(self, value): + assert not isinstance(value, JsonObject) + self.value = value + + def __getstate__(self): + raise NotImplementedError + + def __repr__(self): + return builtins.repr(self.value) + + def __str__(self): + return format(self) + + def __format__(self, format_spec): + """If format_spec is empty, uses self.json_encoder to serialize self.value + as a string. Otherwise, format_spec is treated as an argument list to be + passed to self.json_encoder_factory - which defaults to JSONEncoder - and + then the resulting formatter is used to serialize self.value as a string. + + Example:: + + format("{0} {0:indent=4,sort_keys=True}", json.repr(x)) + """ + if format_spec: + # At this point, format_spec is a string that looks something like + # "indent=4,sort_keys=True". What we want is to build a function call + # from that which looks like: + # + # json_encoder_factory(indent=4,sort_keys=True) + # + # which we can then eval() to create our encoder instance. + make_encoder = "json_encoder_factory(" + format_spec + ")" + encoder = eval( + make_encoder, {"json_encoder_factory": self.json_encoder_factory} + ) + else: + encoder = self.json_encoder + return encoder.encode(self.value) + + +# JSON property validators, for use with MessageDict. +# +# A validator is invoked with the actual value of the JSON property passed to it as +# the sole argument; or if the property is missing in JSON, then () is passed. Note +# that None represents an actual null in JSON, while () is a missing value. +# +# The validator must either raise TypeError or ValueError describing why the property +# value is invalid, or else return the value of the property, possibly after performing +# some substitutions - e.g. replacing () with some default value. + + +def _converter(value, classinfo): + """Convert value (str) to number, otherwise return None if is not possible""" + for one_info in classinfo: + if issubclass(one_info, numbers.Number): + try: + return one_info(value) + except ValueError: + pass + + +def of_type(*classinfo, **kwargs): + """Returns a validator for a JSON property that requires it to have a value of + the specified type. If optional=True, () is also allowed. + + The meaning of classinfo is the same as for isinstance(). 
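+
+ Example (illustrative)::
+
+     validate = of_type(int, str)
+     assert validate(5) == 5
+     assert of_type(int, optional=True)(()) == ()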
+ """ + + assert len(classinfo) + optional = kwargs.pop("optional", False) + assert not len(kwargs) + + def validate(value): + if (optional and value == ()) or isinstance(value, classinfo): + return value + else: + converted_value = _converter(value, classinfo) + if converted_value: + return converted_value + + if not optional and value == (): + raise ValueError("must be specified") + raise TypeError("must be " + " or ".join(t.__name__ for t in classinfo)) + + return validate + + +def default(default): + """Returns a validator for a JSON property with a default value. + + The validator will only allow property values that have the same type as the + specified default value. + """ + + def validate(value): + if value == (): + return default + elif isinstance(value, type(default)): + return value + else: + raise TypeError("must be {0}".format(type(default).__name__)) + + return validate + + +def enum(*values, **kwargs): + """Returns a validator for a JSON enum. + + The validator will only allow the property to have one of the specified values. + + If optional=True, and the property is missing, the first value specified is used + as the default. + """ + + assert len(values) + optional = kwargs.pop("optional", False) + assert not len(kwargs) + + def validate(value): + if optional and value == (): + return values[0] + elif value in values: + return value + else: + raise ValueError("must be one of: {0!r}".format(list(values))) + + return validate + + +def array(validate_item=False, vectorize=False, size=None): + """Returns a validator for a JSON array. + + If the property is missing, it is treated as if it were []. Otherwise, it must + be a list. + + If validate_item=False, it's treated as if it were (lambda x: x) - i.e. any item + is considered valid, and is unchanged. If validate_item is a type or a tuple, + it's treated as if it were json.of_type(validate). + + Every item in the list is replaced with validate_item(item) in-place, propagating + any exceptions raised by the latter. If validate_item is a type or a tuple, it is + treated as if it were json.of_type(validate_item). + + If vectorize=True, and the value is neither a list nor a dict, it is treated as + if it were a single-element list containing that single value - e.g. "foo" is + then the same as ["foo"]; but {} is an error, and not [{}]. + + If size is not None, it can be an int, a tuple of one int, a tuple of two ints, + or a set. If it's an int, the array must have exactly that many elements. If it's + a tuple of one int, it's the minimum length. If it's a tuple of two ints, they + are the minimum and the maximum lengths. If it's a set, it's the set of sizes that + are valid - e.g. for {2, 4}, the array can be either 2 or 4 elements long. 
+ """ + + if not validate_item: + validate_item = lambda x: x + elif isinstance(validate_item, type) or isinstance(validate_item, tuple): + validate_item = of_type(validate_item) + + if size is None: + validate_size = lambda _: True + elif isinstance(size, set): + size = {operator.index(n) for n in size} + validate_size = lambda value: ( + len(value) in size + or "must have {0} elements".format( + " or ".join(str(n) for n in sorted(size)) + ) + ) + elif isinstance(size, tuple): + assert 1 <= len(size) <= 2 + size = tuple(operator.index(n) for n in size) + min_len, max_len = (size + (None,))[0:2] + validate_size = lambda value: ( + "must have at least {0} elements".format(min_len) + if len(value) < min_len + else "must have at most {0} elements".format(max_len) + if max_len is not None and len(value) < max_len + else True + ) + else: + size = operator.index(size) + validate_size = lambda value: ( + len(value) == size or "must have {0} elements".format(size) + ) + + def validate(value): + if value == (): + value = [] + elif vectorize and not isinstance(value, (list, dict)): + value = [value] + + of_type(list)(value) + + size_err = validate_size(value) # True if valid, str if error + if size_err is not True: + raise ValueError(size_err) + + for i, item in enumerate(value): + try: + value[i] = validate_item(item) + except (TypeError, ValueError) as exc: + raise type(exc)(f"[{repr(i)}] {exc}") + return value + + return validate + + +def object(validate_value=False): + """Returns a validator for a JSON object. + + If the property is missing, it is treated as if it were {}. Otherwise, it must + be a dict. + + If validate_value=False, it's treated as if it were (lambda x: x) - i.e. any + value is considered valid, and is unchanged. If validate_value is a type or a + tuple, it's treated as if it were json.of_type(validate_value). + + Every value in the dict is replaced with validate_value(value) in-place, propagating + any exceptions raised by the latter. If validate_value is a type or a tuple, it is + treated as if it were json.of_type(validate_value). Keys are not affected. + """ + + if isinstance(validate_value, type) or isinstance(validate_value, tuple): + validate_value = of_type(validate_value) + + def validate(value): + if value == (): + return {} + + of_type(dict)(value) + if validate_value: + for k, v in value.items(): + try: + value[k] = validate_value(v) + except (TypeError, ValueError) as exc: + raise type(exc)(f"[{repr(k)}] {exc}") + return value + + return validate + + +def repr(value): + return JsonObject(value) + + +dumps = json.dumps +loads = json.loads diff --git a/venv/lib/python3.8/site-packages/debugpy/common/log.py b/venv/lib/python3.8/site-packages/debugpy/common/log.py new file mode 100644 index 0000000..2859dc2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/log.py @@ -0,0 +1,384 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import atexit +import contextlib +import functools +import inspect +import io +import os +import platform +import sys +import threading +import traceback + +import debugpy +from debugpy.common import json, timestamp, util + + +LEVELS = ("debug", "info", "warning", "error") +"""Logging levels, lowest to highest importance. +""" + +log_dir = os.getenv("DEBUGPY_LOG_DIR") +"""If not None, debugger logs its activity to a file named debugpy.*-.log +in the specified directory, where is the return value of os.getpid(). 
+""" + +timestamp_format = "09.3f" +"""Format spec used for timestamps. Can be changed to dial precision up or down. +""" + +_lock = threading.RLock() +_tls = threading.local() +_files = {} # filename -> LogFile +_levels = set() # combined for all log files + + +def _update_levels(): + global _levels + _levels = frozenset(level for file in _files.values() for level in file.levels) + + +class LogFile(object): + def __init__(self, filename, file, levels=LEVELS, close_file=True): + info("Also logging to {0}.", json.repr(filename)) + self.filename = filename + self.file = file + self.close_file = close_file + self._levels = frozenset(levels) + + with _lock: + _files[self.filename] = self + _update_levels() + info( + "{0} {1}\n{2} {3} ({4}-bit)\ndebugpy {5}", + platform.platform(), + platform.machine(), + platform.python_implementation(), + platform.python_version(), + 64 if sys.maxsize > 2 ** 32 else 32, + debugpy.__version__, + _to_files=[self], + ) + + @property + def levels(self): + return self._levels + + @levels.setter + def levels(self, value): + with _lock: + self._levels = frozenset(LEVELS if value is all else value) + _update_levels() + + def write(self, level, output): + if level in self.levels: + try: + self.file.write(output) + self.file.flush() + except Exception: + pass + + def close(self): + with _lock: + del _files[self.filename] + _update_levels() + info("Not logging to {0} anymore.", json.repr(self.filename)) + + if self.close_file: + try: + self.file.close() + except Exception: + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +class NoLog(object): + file = filename = None + + __bool__ = __nonzero__ = lambda self: False + + def close(self): + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + +# Used to inject a newline into stderr if logging there, to clean up the output +# when it's intermixed with regular prints from other sources. +def newline(level="info"): + with _lock: + stderr.write(level, "\n") + + +def write(level, text, _to_files=all): + assert level in LEVELS + + t = timestamp.current() + format_string = "{0}+{1:" + timestamp_format + "}: " + prefix = format_string.format(level[0].upper(), t) + + text = getattr(_tls, "prefix", "") + text + indent = "\n" + (" " * len(prefix)) + output = indent.join(text.split("\n")) + output = prefix + output + "\n\n" + + with _lock: + if _to_files is all: + _to_files = _files.values() + for file in _to_files: + file.write(level, output) + + return text + + +def write_format(level, format_string, *args, **kwargs): + # Don't spend cycles doing expensive formatting if we don't have to. Errors are + # always formatted, so that error() can return the text even if it's not logged. + if level != "error" and level not in _levels: + return + + try: + text = format_string.format(*args, **kwargs) + except Exception: + reraise_exception() + + return write(level, text, kwargs.pop("_to_files", all)) + + +debug = functools.partial(write_format, "debug") +info = functools.partial(write_format, "info") +warning = functools.partial(write_format, "warning") + + +def error(*args, **kwargs): + """Logs an error. + + Returns the output wrapped in AssertionError. Thus, the following:: + + raise log.error(s, ...) + + has the same effect as:: + + log.error(...) 
+ assert False, (s.format(...)) + """ + return AssertionError(write_format("error", *args, **kwargs)) + + +def _exception(format_string="", *args, **kwargs): + level = kwargs.pop("level", "error") + exc_info = kwargs.pop("exc_info", sys.exc_info()) + + if format_string: + format_string += "\n\n" + format_string += "{exception}\nStack where logged:\n{stack}" + + exception = "".join(traceback.format_exception(*exc_info)) + + f = inspect.currentframe() + f = f.f_back if f else f # don't log this frame + try: + stack = "".join(traceback.format_stack(f)) + finally: + del f # avoid cycles + + write_format( + level, format_string, *args, exception=exception, stack=stack, **kwargs + ) + + +def swallow_exception(format_string="", *args, **kwargs): + """Logs an exception with full traceback. + + If format_string is specified, it is formatted with format(*args, **kwargs), and + prepended to the exception traceback on a separate line. + + If exc_info is specified, the exception it describes will be logged. Otherwise, + sys.exc_info() - i.e. the exception being handled currently - will be logged. + + If level is specified, the exception will be logged as a message of that level. + The default is "error". + """ + + _exception(format_string, *args, **kwargs) + + +def reraise_exception(format_string="", *args, **kwargs): + """Like swallow_exception(), but re-raises the current exception after logging it.""" + + assert "exc_info" not in kwargs + _exception(format_string, *args, **kwargs) + raise + + +def to_file(filename=None, prefix=None, levels=LEVELS): + """Starts logging all messages at the specified levels to the designated file. + + Either filename or prefix must be specified, but not both. + + If filename is specified, it designates the log file directly. + + If prefix is specified, the log file is automatically created in options.log_dir, + with filename computed as prefix + os.getpid(). If log_dir is None, no log file + is created, and the function returns immediately. + + If the file with the specified or computed name is already being used as a log + file, it is not overwritten, but its levels are updated as specified. + + The function returns an object with a close() method. When the object is closed, + logs are not written into that file anymore. Alternatively, the returned object + can be used in a with-statement: + + with log.to_file("some.log"): + # now also logging to some.log + # not logging to some.log anymore + """ + + assert (filename is not None) ^ (prefix is not None) + + if filename is None: + if log_dir is None: + return NoLog() + try: + os.makedirs(log_dir) + except OSError: + pass + filename = f"{log_dir}/{prefix}-{os.getpid()}.log" + + file = _files.get(filename) + if file is None: + file = LogFile(filename, io.open(filename, "w", encoding="utf-8"), levels) + else: + file.levels = levels + return file + + +@contextlib.contextmanager +def prefixed(format_string, *args, **kwargs): + """Adds a prefix to all messages logged from the current thread for the duration + of the context manager. 
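+
+ Example (illustrative)::
+
+     with log.prefixed("[pid={0}] ", os.getpid()):
+         log.info("starting")   # message text becomes "[pid=...] starting"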
+ """ + prefix = format_string.format(*args, **kwargs) + old_prefix = getattr(_tls, "prefix", "") + _tls.prefix = prefix + old_prefix + try: + yield + finally: + _tls.prefix = old_prefix + + +def describe_environment(header): + import sysconfig + import site # noqa + + result = [header, "\n\n"] + + def report(s, *args, **kwargs): + result.append(s.format(*args, **kwargs)) + + def report_paths(get_paths, label=None): + prefix = f" {label or get_paths}: " + + expr = None + if not callable(get_paths): + expr = get_paths + get_paths = lambda: util.evaluate(expr) + try: + paths = get_paths() + except AttributeError: + report("{0}\n", prefix) + return + except Exception: + swallow_exception( + "Error evaluating {0}", + repr(expr) if expr else util.srcnameof(get_paths), + ) + return + + if not isinstance(paths, (list, tuple)): + paths = [paths] + + for p in sorted(paths): + report("{0}{1}", prefix, p) + rp = os.path.realpath(p) + if p != rp: + report("({0})", rp) + report("\n") + + prefix = " " * len(prefix) + + report("System paths:\n") + report_paths("sys.prefix") + report_paths("sys.base_prefix") + report_paths("sys.real_prefix") + report_paths("site.getsitepackages()") + report_paths("site.getusersitepackages()") + + site_packages = [ + p + for p in sys.path + if os.path.exists(p) and os.path.basename(p) == "site-packages" + ] + report_paths(lambda: site_packages, "sys.path (site-packages)") + + for name in sysconfig.get_path_names(): + expr = "sysconfig.get_path({0!r})".format(name) + report_paths(expr) + + report_paths("os.__file__") + report_paths("threading.__file__") + report_paths("debugpy.__file__") + + result = "".join(result).rstrip("\n") + info("{0}", result) + + +stderr = LogFile( + "", + sys.stderr, + levels=os.getenv("DEBUGPY_LOG_STDERR", "warning error").split(), + close_file=False, +) + + +@atexit.register +def _close_files(): + for file in tuple(_files.values()): + file.close() + + +# The following are helper shortcuts for printf debugging. They must never be used +# in production code. + + +def _repr(value): # pragma: no cover + warning("$REPR {0!r}", value) + + +def _vars(*names): # pragma: no cover + locals = inspect.currentframe().f_back.f_locals + if names: + locals = {name: locals[name] for name in names if name in locals} + warning("$VARS {0!r}", locals) + + +def _stack(): # pragma: no cover + stack = "\n".join(traceback.format_stack()) + warning("$STACK:\n\n{0}", stack) + + +def _threads(): # pragma: no cover + output = "\n".join([str(t) for t in threading.enumerate()]) + warning("$THREADS:\n\n{0}", output) diff --git a/venv/lib/python3.8/site-packages/debugpy/common/messaging.py b/venv/lib/python3.8/site-packages/debugpy/common/messaging.py new file mode 100644 index 0000000..eb9556d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/messaging.py @@ -0,0 +1,1505 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +"""An implementation of the session and presentation layers as used in the Debug +Adapter Protocol (DAP): channels and their lifetime, JSON messages, requests, +responses, and events. 
+ +https://microsoft.github.io/debug-adapter-protocol/overview#base-protocol +""" + +from __future__ import annotations + +import collections +import contextlib +import functools +import itertools +import os +import socket +import sys +import threading + +from debugpy.common import json, log, util +from debugpy.common.util import hide_thread_from_debugger + + +class JsonIOError(IOError): + """Indicates that a read or write operation on JsonIOStream has failed.""" + + def __init__(self, *args, **kwargs): + stream = kwargs.pop("stream") + cause = kwargs.pop("cause", None) + if not len(args) and cause is not None: + args = [str(cause)] + super().__init__(*args, **kwargs) + + self.stream = stream + """The stream that couldn't be read or written. + + Set by JsonIOStream.read_json() and JsonIOStream.write_json(). + + JsonMessageChannel relies on this value to decide whether a NoMoreMessages + instance that bubbles up to the message loop is related to that loop. + """ + + self.cause = cause + """The underlying exception, if any.""" + + +class NoMoreMessages(JsonIOError, EOFError): + """Indicates that there are no more messages that can be read from or written + to a stream. + """ + + def __init__(self, *args, **kwargs): + args = args if len(args) else ["No more messages"] + super().__init__(*args, **kwargs) + + +class JsonIOStream(object): + """Implements a JSON value stream over two byte streams (input and output). + + Each value is encoded as a DAP packet, with metadata headers and a JSON payload. + """ + + MAX_BODY_SIZE = 0xFFFFFF + + json_decoder_factory = json.JsonDecoder + """Used by read_json() when decoder is None.""" + + json_encoder_factory = json.JsonEncoder + """Used by write_json() when encoder is None.""" + + @classmethod + def from_stdio(cls, name="stdio"): + """Creates a new instance that receives messages from sys.stdin, and sends + them to sys.stdout. + """ + return cls(sys.stdin.buffer, sys.stdout.buffer, name) + + @classmethod + def from_process(cls, process, name="stdio"): + """Creates a new instance that receives messages from process.stdin, and sends + them to process.stdout. + """ + return cls(process.stdout, process.stdin, name) + + @classmethod + def from_socket(cls, sock, name=None): + """Creates a new instance that sends and receives messages over a socket.""" + sock.settimeout(None) # make socket blocking + if name is None: + name = repr(sock) + + # TODO: investigate switching to buffered sockets; readline() on unbuffered + # sockets is very slow! Although the implementation of readline() itself is + # native code, it calls read(1) in a loop - and that then ultimately calls + # SocketIO.readinto(), which is implemented in Python. + socket_io = sock.makefile("rwb", 0) + + # SocketIO.close() doesn't close the underlying socket. + def cleanup(): + try: + sock.shutdown(socket.SHUT_RDWR) + except Exception: + pass + sock.close() + + return cls(socket_io, socket_io, name, cleanup) + + def __init__(self, reader, writer, name=None, cleanup=lambda: None): + """Creates a new JsonIOStream. + + reader must be a BytesIO-like object, from which incoming messages will be + read by read_json(). + + writer must be a BytesIO-like object, into which outgoing messages will be + written by write_json(). + + cleanup must be a callable; it will be invoked without arguments when the + stream is closed. + + reader.readline() must treat "\n" as the line terminator, and must leave "\r" + as is - it must not replace "\r\n" with "\n" automatically, as TextIO does. 
+ """ + + if name is None: + name = f"reader={reader!r}, writer={writer!r}" + + self.name = name + self._reader = reader + self._writer = writer + self._cleanup = cleanup + self._closed = False + + def close(self): + """Closes the stream, the reader, and the writer.""" + + if self._closed: + return + self._closed = True + + log.debug("Closing {0} message stream", self.name) + try: + try: + # Close the writer first, so that the other end of the connection has + # its message loop waiting on read() unblocked. If there is an exception + # while closing the writer, we still want to try to close the reader - + # only one exception can bubble up, so if both fail, it'll be the one + # from reader. + try: + self._writer.close() + finally: + if self._reader is not self._writer: + self._reader.close() + finally: + self._cleanup() + except Exception: + log.reraise_exception("Error while closing {0} message stream", self.name) + + def _log_message(self, dir, data, logger=log.debug): + return logger("{0} {1} {2}", self.name, dir, data) + + def _read_line(self, reader): + line = b"" + while True: + try: + line += reader.readline() + except Exception as exc: + raise NoMoreMessages(str(exc), stream=self) + if not line: + raise NoMoreMessages(stream=self) + if line.endswith(b"\r\n"): + line = line[0:-2] + return line + + def read_json(self, decoder=None): + """Read a single JSON value from reader. + + Returns JSON value as parsed by decoder.decode(), or raises NoMoreMessages + if there are no more values to be read. + """ + + decoder = decoder if decoder is not None else self.json_decoder_factory() + reader = self._reader + read_line = functools.partial(self._read_line, reader) + + # If any error occurs while reading and parsing the message, log the original + # raw message data as is, so that it's possible to diagnose missing or invalid + # headers, encoding issues, JSON syntax errors etc. + def log_message_and_reraise_exception(format_string="", *args, **kwargs): + if format_string: + format_string += "\n\n" + format_string += "{name} -->\n{raw_lines}" + + raw_lines = b"".join(raw_chunks).split(b"\n") + raw_lines = "\n".join(repr(line) for line in raw_lines) + + log.reraise_exception( + format_string, *args, name=self.name, raw_lines=raw_lines, **kwargs + ) + + raw_chunks = [] + headers = {} + + while True: + try: + line = read_line() + except Exception: + # Only log it if we have already read some headers, and are looking + # for a blank line terminating them. If this is the very first read, + # there's no message data to log in any case, and the caller might + # be anticipating the error - e.g. NoMoreMessages on disconnect. 
+ if headers: + log_message_and_reraise_exception( + "Error while reading message headers:" + ) + else: + raise + + raw_chunks += [line, b"\n"] + if line == b"": + break + + key, _, value = line.partition(b":") + headers[key] = value + + try: + length = int(headers[b"Content-Length"]) + if not (0 <= length <= self.MAX_BODY_SIZE): + raise ValueError + except (KeyError, ValueError): + try: + raise IOError("Content-Length is missing or invalid:") + except Exception: + log_message_and_reraise_exception() + + body_start = len(raw_chunks) + body_remaining = length + while body_remaining > 0: + try: + chunk = reader.read(body_remaining) + if not chunk: + raise EOFError + except Exception as exc: + # Not logged due to https://github.com/microsoft/ptvsd/issues/1699 + raise NoMoreMessages(str(exc), stream=self) + + raw_chunks.append(chunk) + body_remaining -= len(chunk) + assert body_remaining == 0 + + body = b"".join(raw_chunks[body_start:]) + try: + body = body.decode("utf-8") + except Exception: + log_message_and_reraise_exception() + + try: + body = decoder.decode(body) + except Exception: + log_message_and_reraise_exception() + + # If parsed successfully, log as JSON for readability. + self._log_message("-->", body) + return body + + def write_json(self, value, encoder=None): + """Write a single JSON value into writer. + + Value is written as encoded by encoder.encode(). + """ + + if self._closed: + # Don't log this - it's a common pattern to write to a stream while + # anticipating EOFError from it in case it got closed concurrently. + raise NoMoreMessages(stream=self) + + encoder = encoder if encoder is not None else self.json_encoder_factory() + writer = self._writer + + # Format the value as a message, and try to log any failures using as much + # information as we already have at the point of the failure. For example, + # if it fails after it is serialized to JSON, log that JSON. + + try: + body = encoder.encode(value) + except Exception: + self._log_message("<--", repr(value), logger=log.reraise_exception) + body = body.encode("utf-8") + + header = f"Content-Length: {len(body)}\r\n\r\n".encode("ascii") + data = header + body + data_written = 0 + try: + while data_written < len(data): + written = writer.write(data[data_written:]) + data_written += written + writer.flush() + except Exception as exc: + self._log_message("<--", value, logger=log.swallow_exception) + raise JsonIOError(stream=self, cause=exc) + + self._log_message("<--", value) + + def __repr__(self): + return f"{type(self).__name__}({self.name!r})" + + +class MessageDict(collections.OrderedDict): + """A specialized dict that is used for JSON message payloads - Request.arguments, + Response.body, and Event.body. + + For all members that normally throw KeyError when a requested key is missing, this + dict raises InvalidMessageError instead. Thus, a message handler can skip checks + for missing properties, and just work directly with the payload on the assumption + that it is valid according to the protocol specification; if anything is missing, + it will be reported automatically in the proper manner. + + If the value for the requested key is itself a dict, it is returned as is, and not + automatically converted to MessageDict. Thus, to enable convenient chaining - e.g. + d["a"]["b"]["c"] - the dict must consistently use MessageDict instances rather than + vanilla dicts for all its values, recursively. 
This is guaranteed for the payload + of all freshly received messages (unless and until it is mutated), but there is no + such guarantee for outgoing messages. + """ + + def __init__(self, message, items=None): + assert message is None or isinstance(message, Message) + + if items is None: + super().__init__() + else: + super().__init__(items) + + self.message = message + """The Message object that owns this dict. + + For any instance exposed via a Message object corresponding to some incoming + message, it is guaranteed to reference that Message object. There is no similar + guarantee for outgoing messages. + """ + + def __repr__(self): + try: + return format(json.repr(self)) + except Exception: + return super().__repr__() + + def __call__(self, key, validate, optional=False): + """Like get(), but with validation. + + The item is first retrieved as if with self.get(key, default=()) - the default + value is () rather than None, so that JSON nulls are distinguishable from + missing properties. + + If optional=True, and the value is (), it's returned as is. Otherwise, the + item is validated by invoking validate(item) on it. + + If validate=False, it's treated as if it were (lambda x: x) - i.e. any value + is considered valid, and is returned unchanged. If validate is a type or a + tuple, it's treated as json.of_type(validate). Otherwise, if validate is not + callable(), it's treated as json.default(validate). + + If validate() returns successfully, the item is substituted with the value + it returns - thus, the validator can e.g. replace () with a suitable default + value for the property. + + If validate() raises TypeError or ValueError, raises InvalidMessageError with + the same text that applies_to(self.messages). + + See debugpy.common.json for reusable validators. + """ + + if not validate: + validate = lambda x: x + elif isinstance(validate, type) or isinstance(validate, tuple): + validate = json.of_type(validate, optional=optional) + elif not callable(validate): + validate = json.default(validate) + + value = self.get(key, ()) + try: + value = validate(value) + except (TypeError, ValueError) as exc: + message = Message if self.message is None else self.message + err = str(exc) + if not err.startswith("["): + err = " " + err + raise message.isnt_valid("{0}{1}", json.repr(key), err) + return value + + def _invalid_if_no_key(func): + def wrap(self, key, *args, **kwargs): + try: + return func(self, key, *args, **kwargs) + except KeyError: + message = Message if self.message is None else self.message + raise message.isnt_valid("missing property {0!r}", key) + + return wrap + + __getitem__ = _invalid_if_no_key(collections.OrderedDict.__getitem__) + __delitem__ = _invalid_if_no_key(collections.OrderedDict.__delitem__) + pop = _invalid_if_no_key(collections.OrderedDict.pop) + + del _invalid_if_no_key + + +def _payload(value): + """JSON validator for message payload. + + If that value is missing or null, it is treated as if it were {}. + """ + + if value is not None and value != (): + if isinstance(value, dict): # can be int, str, list... + assert isinstance(value, MessageDict) + return value + + # Missing payload. Construct a dummy MessageDict, and make it look like it was + # deserialized. See JsonMessageChannel._parse_incoming_message for why it needs + # to have associate_with(). 
+ + def associate_with(message): + value.message = message + + value = MessageDict(None) + value.associate_with = associate_with + return value + + +class Message(object): + """Represents a fully parsed incoming or outgoing message. + + https://microsoft.github.io/debug-adapter-protocol/specification#protocolmessage + """ + + def __init__(self, channel, seq, json=None): + self.channel = channel + + self.seq = seq + """Sequence number of the message in its channel. + + This can be None for synthesized Responses. + """ + + self.json = json + """For incoming messages, the MessageDict containing raw JSON from which + this message was originally parsed. + """ + + def __str__(self): + return json.repr(self.json) if self.json is not None else repr(self) + + def describe(self): + """A brief description of the message that is enough to identify it. + + Examples: + '#1 request "launch" from IDE' + '#2 response to #1 request "launch" from IDE'. + """ + raise NotImplementedError + + @property + def payload(self) -> MessageDict: + """Payload of the message - self.body or self.arguments, depending on the + message type. + """ + raise NotImplementedError + + def __call__(self, *args, **kwargs): + """Same as self.payload(...).""" + return self.payload(*args, **kwargs) + + def __contains__(self, key): + """Same as (key in self.payload).""" + return key in self.payload + + def is_event(self, *event): + """Returns True if this message is an Event of one of the specified types.""" + if not isinstance(self, Event): + return False + return event == () or self.event in event + + def is_request(self, *command): + """Returns True if this message is a Request of one of the specified types.""" + if not isinstance(self, Request): + return False + return command == () or self.command in command + + def is_response(self, *command): + """Returns True if this message is a Response to a request of one of the + specified types. + """ + if not isinstance(self, Response): + return False + return command == () or self.request.command in command + + def error(self, exc_type, format_string, *args, **kwargs): + """Returns a new exception of the specified type from the point at which it is + invoked, with the specified formatted message as the reason. + + The resulting exception will have its cause set to the Message object on which + error() was called. Additionally, if that message is a Request, a failure + response is immediately sent. + """ + + assert issubclass(exc_type, MessageHandlingError) + + silent = kwargs.pop("silent", False) + reason = format_string.format(*args, **kwargs) + exc = exc_type(reason, self, silent) # will log it + + if isinstance(self, Request): + self.respond(exc) + return exc + + def isnt_valid(self, *args, **kwargs): + """Same as self.error(InvalidMessageError, ...).""" + return self.error(InvalidMessageError, *args, **kwargs) + + def cant_handle(self, *args, **kwargs): + """Same as self.error(MessageHandlingError, ...).""" + return self.error(MessageHandlingError, *args, **kwargs) + + +class Event(Message): + """Represents an incoming event. + + https://microsoft.github.io/debug-adapter-protocol/specification#event + + It is guaranteed that body is a MessageDict associated with this Event, and so + are all the nested dicts in it. If "body" was missing or null in JSON, body is + an empty dict. + + To handle the event, JsonMessageChannel tries to find a handler for this event in + JsonMessageChannel.handlers. Given event="X", if handlers.X_event exists, then it + is the specific handler for this event. 
Otherwise, handlers.event must exist, and + it is the generic handler for this event. A missing handler is a fatal error. + + No further incoming messages are processed until the handler returns, except for + responses to requests that have wait_for_response() invoked on them. + + To report failure to handle the event, the handler must raise an instance of + MessageHandlingError that applies_to() the Event object it was handling. Any such + failure is logged, after which the message loop moves on to the next message. + + Helper methods Message.isnt_valid() and Message.cant_handle() can be used to raise + the appropriate exception type that applies_to() the Event object. + """ + + def __init__(self, channel, seq, event, body, json=None): + super().__init__(channel, seq, json) + + self.event = event + + if isinstance(body, MessageDict) and hasattr(body, "associate_with"): + body.associate_with(self) + self.body = body + + def describe(self): + return f"#{self.seq} event {json.repr(self.event)} from {self.channel}" + + @property + def payload(self): + return self.body + + @staticmethod + def _parse(channel, message_dict): + seq = message_dict("seq", int) + event = message_dict("event", str) + body = message_dict("body", _payload) + message = Event(channel, seq, event, body, json=message_dict) + channel._enqueue_handlers(message, message._handle) + + def _handle(self): + channel = self.channel + handler = channel._get_handler_for("event", self.event) + try: + try: + result = handler(self) + assert ( + result is None + ), f"Handler {util.srcnameof(handler)} tried to respond to {self.describe()}." + except MessageHandlingError as exc: + if not exc.applies_to(self): + raise + log.error( + "Handler {0}\ncouldn't handle {1}:\n{2}", + util.srcnameof(handler), + self.describe(), + str(exc), + ) + except Exception: + log.reraise_exception( + "Handler {0}\ncouldn't handle {1}:", + util.srcnameof(handler), + self.describe(), + ) + + +NO_RESPONSE = object() +"""Can be returned from a request handler in lieu of the response body, to indicate +that no response is to be sent. + +Request.respond() must be invoked explicitly at some later point to provide a response. +""" + + +class Request(Message): + """Represents an incoming or an outgoing request. + + Incoming requests are represented directly by instances of this class. + + Outgoing requests are represented by instances of OutgoingRequest, which provides + additional functionality to handle responses. + + For incoming requests, it is guaranteed that arguments is a MessageDict associated + with this Request, and so are all the nested dicts in it. If "arguments" was missing + or null in JSON, arguments is an empty dict. + + To handle the request, JsonMessageChannel tries to find a handler for this request + in JsonMessageChannel.handlers. Given command="X", if handlers.X_request exists, + then it is the specific handler for this request. Otherwise, handlers.request must + exist, and it is the generic handler for this request. A missing handler is a fatal + error. + + The handler is then invoked with the Request object as its sole argument. + + If the handler itself invokes respond() on the Request at any point, then it must + not return any value. + + Otherwise, if the handler returns NO_RESPONSE, no response to the request is sent. + It must be sent manually at some later point via respond(). + + Otherwise, a response to the request is sent with the returned value as the body. 
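+
+ For example (illustrative), a minimal handler might look like::
+
+     class Handlers(object):
+         def pause_request(self, request):
+             ...                 # do the actual work here
+             return {}           # sent back as a success response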
+ + To fail the request, the handler can return an instance of MessageHandlingError, + or respond() with one, or raise one such that it applies_to() the Request object + being handled. + + Helper methods Message.isnt_valid() and Message.cant_handle() can be used to raise + the appropriate exception type that applies_to() the Request object. + """ + + def __init__(self, channel, seq, command, arguments, json=None): + super().__init__(channel, seq, json) + + self.command = command + + if isinstance(arguments, MessageDict) and hasattr(arguments, "associate_with"): + arguments.associate_with(self) + self.arguments = arguments + + self.response = None + """Response to this request. + + For incoming requests, it is set as soon as the request handler returns. + + For outgoing requests, it is set as soon as the response is received, and + before self._handle_response is invoked. + """ + + def describe(self): + return f"#{self.seq} request {json.repr(self.command)} from {self.channel}" + + @property + def payload(self): + return self.arguments + + def respond(self, body): + assert self.response is None + d = {"type": "response", "request_seq": self.seq, "command": self.command} + + if isinstance(body, Exception): + d["success"] = False + d["message"] = str(body) + else: + d["success"] = True + if body is not None and body != {}: + d["body"] = body + + with self.channel._send_message(d) as seq: + pass + self.response = Response(self.channel, seq, self, body) + + @staticmethod + def _parse(channel, message_dict): + seq = message_dict("seq", int) + command = message_dict("command", str) + arguments = message_dict("arguments", _payload) + message = Request(channel, seq, command, arguments, json=message_dict) + channel._enqueue_handlers(message, message._handle) + + def _handle(self): + channel = self.channel + handler = channel._get_handler_for("request", self.command) + try: + try: + result = handler(self) + except MessageHandlingError as exc: + if not exc.applies_to(self): + raise + result = exc + log.error( + "Handler {0}\ncouldn't handle {1}:\n{2}", + util.srcnameof(handler), + self.describe(), + str(exc), + ) + + if result is NO_RESPONSE: + assert self.response is None, ( + "Handler {0} for {1} must not return NO_RESPONSE if it has already " + "invoked request.respond().".format( + util.srcnameof(handler), self.describe() + ) + ) + elif self.response is not None: + assert result is None or result is self.response.body, ( + "Handler {0} for {1} must not return a response body if it has " + "already invoked request.respond().".format( + util.srcnameof(handler), self.describe() + ) + ) + else: + assert result is not None, ( + "Handler {0} for {1} must either call request.respond() before it " + "returns, or return the response body, or return NO_RESPONSE.".format( + util.srcnameof(handler), self.describe() + ) + ) + try: + self.respond(result) + except NoMoreMessages: + log.warning( + "Channel was closed before the response from handler {0} to {1} could be sent", + util.srcnameof(handler), + self.describe(), + ) + + except Exception: + log.reraise_exception( + "Handler {0}\ncouldn't handle {1}:", + util.srcnameof(handler), + self.describe(), + ) + + +class OutgoingRequest(Request): + """Represents an outgoing request, for which it is possible to wait for a + response to be received, and register a response handler. 
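+
+ Example (illustrative), given an OutgoingRequest instance req::
+
+     body = req.wait_for_response()    # block until the response (or disconnect)
+     # or, asynchronously:
+     req.on_response(lambda response: log.info("got {0!r}", response.body))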
+ """ + + _parse = _handle = None + + def __init__(self, channel, seq, command, arguments): + super().__init__(channel, seq, command, arguments) + self._response_handlers = [] + + def describe(self): + return f"{self.seq} request {json.repr(self.command)} to {self.channel}" + + def wait_for_response(self, raise_if_failed=True): + """Waits until a response is received for this request, records the Response + object for it in self.response, and returns response.body. + + If no response was received from the other party before the channel closed, + self.response is a synthesized Response with body=NoMoreMessages(). + + If raise_if_failed=True and response.success is False, raises response.body + instead of returning. + """ + + with self.channel: + while self.response is None: + self.channel._handlers_enqueued.wait() + + if raise_if_failed and not self.response.success: + raise self.response.body + return self.response.body + + def on_response(self, response_handler): + """Registers a handler to invoke when a response is received for this request. + The handler is invoked with Response as its sole argument. + + If response has already been received, invokes the handler immediately. + + It is guaranteed that self.response is set before the handler is invoked. + If no response was received from the other party before the channel closed, + self.response is a dummy Response with body=NoMoreMessages(). + + The handler is always invoked asynchronously on an unspecified background + thread - thus, the caller of on_response() can never be blocked or deadlocked + by the handler. + + No further incoming messages are processed until the handler returns, except for + responses to requests that have wait_for_response() invoked on them. + """ + + with self.channel: + self._response_handlers.append(response_handler) + self._enqueue_response_handlers() + + def _enqueue_response_handlers(self): + response = self.response + if response is None: + # Response._parse() will submit the handlers when response is received. + return + + def run_handlers(): + for handler in handlers: + try: + try: + handler(response) + except MessageHandlingError as exc: + if not exc.applies_to(response): + raise + log.error( + "Handler {0}\ncouldn't handle {1}:\n{2}", + util.srcnameof(handler), + response.describe(), + str(exc), + ) + except Exception: + log.reraise_exception( + "Handler {0}\ncouldn't handle {1}:", + util.srcnameof(handler), + response.describe(), + ) + + handlers = self._response_handlers[:] + self.channel._enqueue_handlers(response, run_handlers) + del self._response_handlers[:] + + +class Response(Message): + """Represents an incoming or an outgoing response to a Request. + + https://microsoft.github.io/debug-adapter-protocol/specification#response + + error_message corresponds to "message" in JSON, and is renamed for clarity. + + If success is False, body is None. Otherwise, it is a MessageDict associated + with this Response, and so are all the nested dicts in it. If "body" was missing + or null in JSON, body is an empty dict. + + If this is a response to an outgoing request, it will be handled by the handler + registered via self.request.on_response(), if any. + + Regardless of whether there is such a handler, OutgoingRequest.wait_for_response() + can also be used to retrieve and handle the response. If there is a handler, it is + executed before wait_for_response() returns. 
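A hedged sketch of the non-blocking path via on_response(), with the same illustrative "evaluate" command; the handler runs on a background thread:

    def on_evaluate_response(response):
        if response.success:
            print("evaluate result:", response.body)
        else:
            # response.body is the failure, e.g. NoMoreMessages if the channel
            # closed before any response arrived.
            print("evaluate failed:", response.body)

    channel.send_request("evaluate", {"expression": "1 + 1"}).on_response(
        on_evaluate_response
    )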
+ + No further incoming messages are processed until the handler returns, except for + responses to requests that have wait_for_response() invoked on them. + + To report failure to handle the event, the handler must raise an instance of + MessageHandlingError that applies_to() the Response object it was handling. Any + such failure is logged, after which the message loop moves on to the next message. + + Helper methods Message.isnt_valid() and Message.cant_handle() can be used to raise + the appropriate exception type that applies_to() the Response object. + """ + + def __init__(self, channel, seq, request, body, json=None): + super().__init__(channel, seq, json) + + self.request = request + """The request to which this is the response.""" + + if isinstance(body, MessageDict) and hasattr(body, "associate_with"): + body.associate_with(self) + self.body = body + """Body of the response if the request was successful, or an instance + of some class derived from Exception it it was not. + + If a response was received from the other side, but request failed, it is an + instance of MessageHandlingError containing the received error message. If the + error message starts with InvalidMessageError.PREFIX, then it's an instance of + the InvalidMessageError specifically, and that prefix is stripped. + + If no response was received from the other party before the channel closed, + it is an instance of NoMoreMessages. + """ + + def describe(self): + return f"#{self.seq} response to {self.request.describe()}" + + @property + def payload(self): + return self.body + + @property + def success(self): + """Whether the request succeeded or not.""" + return not isinstance(self.body, Exception) + + @property + def result(self): + """Result of the request. Returns the value of response.body, unless it + is an exception, in which case it is raised instead. + """ + if self.success: + return self.body + else: + raise self.body + + @staticmethod + def _parse(channel, message_dict, body=None): + seq = message_dict("seq", int) if (body is None) else None + request_seq = message_dict("request_seq", int) + command = message_dict("command", str) + success = message_dict("success", bool) + if body is None: + if success: + body = message_dict("body", _payload) + else: + error_message = message_dict("message", str) + exc_type = MessageHandlingError + if error_message.startswith(InvalidMessageError.PREFIX): + error_message = error_message[len(InvalidMessageError.PREFIX) :] + exc_type = InvalidMessageError + body = exc_type(error_message, silent=True) + + try: + with channel: + request = channel._sent_requests.pop(request_seq) + known_request = True + except KeyError: + # Synthetic Request that only has seq and command as specified in response + # JSON, for error reporting purposes. + request = OutgoingRequest(channel, request_seq, command, "") + known_request = False + + if not success: + body.cause = request + + response = Response(channel, seq, request, body, json=message_dict) + + with channel: + request.response = response + request._enqueue_response_handlers() + + if known_request: + return response + else: + raise response.isnt_valid( + "request_seq={0} does not match any known request", request_seq + ) + + +class Disconnect(Message): + """A dummy message used to represent disconnect. It's always the last message + received from any channel. 
+ """ + + def __init__(self, channel): + super().__init__(channel, None) + + def describe(self): + return f"disconnect from {self.channel}" + + +class MessageHandlingError(Exception): + """Indicates that a message couldn't be handled for some reason. + + If the reason is a contract violation - i.e. the message that was handled did not + conform to the protocol specification - InvalidMessageError, which is a subclass, + should be used instead. + + If any message handler raises an exception not derived from this class, it will + escape the message loop unhandled, and terminate the process. + + If any message handler raises this exception, but applies_to(message) is False, it + is treated as if it was a generic exception, as desribed above. Thus, if a request + handler issues another request of its own, and that one fails, the failure is not + silently propagated. However, a request that is delegated via Request.delegate() + will also propagate failures back automatically. For manual propagation, catch the + exception, and call exc.propagate(). + + If any event handler raises this exception, and applies_to(event) is True, the + exception is silently swallowed by the message loop. + + If any request handler raises this exception, and applies_to(request) is True, the + exception is silently swallowed by the message loop, and a failure response is sent + with "message" set to str(reason). + + Note that, while errors are not logged when they're swallowed by the message loop, + by that time they have already been logged by their __init__ (when instantiated). + """ + + def __init__(self, reason, cause=None, silent=False): + """Creates a new instance of this class, and immediately logs the exception. + + Message handling errors are logged immediately unless silent=True, so that the + precise context in which they occured can be determined from the surrounding + log entries. + """ + + self.reason = reason + """Why it couldn't be handled. This can be any object, but usually it's either + str or Exception. + """ + + assert cause is None or isinstance(cause, Message) + self.cause = cause + """The Message object for the message that couldn't be handled. For responses + to unknown requests, this is a synthetic Request. + """ + + if not silent: + try: + raise self + except MessageHandlingError: + log.swallow_exception() + + def __hash__(self): + return hash((self.reason, id(self.cause))) + + def __eq__(self, other): + if not isinstance(other, MessageHandlingError): + return NotImplemented + if type(self) is not type(other): + return NotImplemented + if self.reason != other.reason: + return False + if self.cause is not None and other.cause is not None: + if self.cause.seq != other.cause.seq: + return False + return True + + def __ne__(self, other): + return not self == other + + def __str__(self): + return str(self.reason) + + def __repr__(self): + s = type(self).__name__ + if self.cause is None: + s += f"reason={self.reason!r})" + else: + s += f"channel={self.cause.channel.name!r}, cause={self.cause.seq!r}, reason={self.reason!r})" + return s + + def applies_to(self, message): + """Whether this MessageHandlingError can be treated as a reason why the + handling of message failed. + + If self.cause is None, this is always true. + + If self.cause is not None, this is only true if cause is message. + """ + return self.cause is None or self.cause is message + + def propagate(self, new_cause): + """Propagates this error, raising a new instance of the same class with the + same reason, but a different cause. 
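    For example (a hedged sketch; the handler and the second channel are
    illustrative), a request handler that manually forwards a request and wants
    any failure reported against the original request might do:

        def example_request(request):
            try:
                return other_channel.request(request.command, request.arguments)
            except MessageHandlingError as exc:
                exc.propagate(request)  # re-raised with cause set to this request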
+ """ + raise type(self)(self.reason, new_cause, silent=True) + + +class InvalidMessageError(MessageHandlingError): + """Indicates that an incoming message did not follow the protocol specification - + for example, it was missing properties that are required, or the message itself + is not allowed in the current state. + + Raised by MessageDict in lieu of KeyError for missing keys. + """ + + PREFIX = "Invalid message: " + """Automatically prepended to the "message" property in JSON responses, when the + handler raises InvalidMessageError. + + If a failed response has "message" property that starts with this prefix, it is + reported as InvalidMessageError rather than MessageHandlingError. + """ + + def __str__(self): + return InvalidMessageError.PREFIX + str(self.reason) + + +class JsonMessageChannel(object): + """Implements a JSON message channel on top of a raw JSON message stream, with + support for DAP requests, responses, and events. + + The channel can be locked for exclusive use via the with-statement:: + + with channel: + channel.send_request(...) + # No interleaving messages can be sent here from other threads. + channel.send_event(...) + """ + + def __init__(self, stream, handlers=None, name=None): + self.stream = stream + self.handlers = handlers + self.name = name if name is not None else stream.name + self.started = False + self._lock = threading.RLock() + self._closed = False + self._seq_iter = itertools.count(1) + self._sent_requests = {} # {seq: Request} + self._handler_queue = [] # [(what, handler)] + self._handlers_enqueued = threading.Condition(self._lock) + self._handler_thread = None + self._parser_thread = None + + def __str__(self): + return self.name + + def __repr__(self): + return f"{type(self).__name__}({self.name!r})" + + def __enter__(self): + self._lock.acquire() + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + def close(self): + """Closes the underlying stream. + + This does not immediately terminate any handlers that are already executing, + but they will be unable to respond. No new request or event handlers will + execute after this method is called, even for messages that have already been + received. However, response handlers will continue to executed for any request + that is still pending, as will any handlers registered via on_response(). + """ + with self: + if not self._closed: + self._closed = True + self.stream.close() + + def start(self): + """Starts a message loop which parses incoming messages and invokes handlers + for them on a background thread, until the channel is closed. + + Incoming messages, including responses to requests, will not be processed at + all until this is invoked. + """ + + assert not self.started + self.started = True + + self._parser_thread = threading.Thread( + target=self._parse_incoming_messages, name=f"{self} message parser" + ) + + hide_thread_from_debugger(self._parser_thread) + self._parser_thread.daemon = True + self._parser_thread.start() + + def wait(self): + """Waits for the message loop to terminate, and for all enqueued Response + message handlers to finish executing. 
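    A hedged sketch of the typical lifecycle, assuming `stream` is a JsonIOStream
    and `handlers` is a handlers object as described above:

        channel = JsonMessageChannel(stream, handlers, name="example")
        channel.start()   # parse and dispatch incoming messages on a background thread
        channel.wait()    # block until the channel is closed and handlers have drained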
+ """ + parser_thread = self._parser_thread + try: + if parser_thread is not None: + parser_thread.join() + except AssertionError: + log.debug("Handled error joining parser thread.") + try: + handler_thread = self._handler_thread + if handler_thread is not None: + handler_thread.join() + except AssertionError: + log.debug("Handled error joining handler thread.") + + # Order of keys for _prettify() - follows the order of properties in + # https://microsoft.github.io/debug-adapter-protocol/specification + _prettify_order = ( + "seq", + "type", + "request_seq", + "success", + "command", + "event", + "message", + "arguments", + "body", + "error", + ) + + def _prettify(self, message_dict): + """Reorders items in a MessageDict such that it is more readable.""" + for key in self._prettify_order: + if key not in message_dict: + continue + value = message_dict[key] + del message_dict[key] + message_dict[key] = value + + @contextlib.contextmanager + def _send_message(self, message): + """Sends a new message to the other party. + + Generates a new sequence number for the message, and provides it to the + caller before the message is sent, using the context manager protocol:: + + with send_message(...) as seq: + # The message hasn't been sent yet. + ... + # Now the message has been sent. + + Safe to call concurrently for the same channel from different threads. + """ + + assert "seq" not in message + with self: + seq = next(self._seq_iter) + + message = MessageDict(None, message) + message["seq"] = seq + self._prettify(message) + + with self: + yield seq + self.stream.write_json(message) + + def send_request(self, command, arguments=None, on_before_send=None): + """Sends a new request, and returns the OutgoingRequest object for it. + + If arguments is None or {}, "arguments" will be omitted in JSON. + + If on_before_send is not None, invokes on_before_send() with the request + object as the sole argument, before the request actually gets sent. + + Does not wait for response - use OutgoingRequest.wait_for_response(). + + Safe to call concurrently for the same channel from different threads. + """ + + d = {"type": "request", "command": command} + if arguments is not None and arguments != {}: + d["arguments"] = arguments + + with self._send_message(d) as seq: + request = OutgoingRequest(self, seq, command, arguments) + if on_before_send is not None: + on_before_send(request) + self._sent_requests[seq] = request + return request + + def send_event(self, event, body=None): + """Sends a new event. + + If body is None or {}, "body" will be omitted in JSON. + + Safe to call concurrently for the same channel from different threads. + """ + + d = {"type": "event", "event": event} + if body is not None and body != {}: + d["body"] = body + + with self._send_message(d): + pass + + def request(self, *args, **kwargs): + """Same as send_request(...).wait_for_response()""" + return self.send_request(*args, **kwargs).wait_for_response() + + def propagate(self, message): + """Sends a new message with the same type and payload. + + If it was a request, returns the new OutgoingRequest object for it. + """ + assert message.is_request() or message.is_event() + if message.is_request(): + return self.send_request(message.command, message.arguments) + else: + self.send_event(message.event, message.body) + + def delegate(self, message): + """Like propagate(message).wait_for_response(), but will also propagate + any resulting MessageHandlingError back. 
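    For example (a hedged sketch; the second channel is illustrative), a handler
    that forwards requests verbatim to another channel:

        def example_request(request):
            # Waits for the other side's response; failures are automatically
            # re-raised with this request as the cause.
            return server_channel.delegate(request)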
+ """ + try: + result = self.propagate(message) + if result.is_request(): + result = result.wait_for_response() + return result + except MessageHandlingError as exc: + exc.propagate(message) + + def _parse_incoming_messages(self): + log.debug("Starting message loop for channel {0}", self) + try: + while True: + self._parse_incoming_message() + + except NoMoreMessages as exc: + log.debug("Exiting message loop for channel {0}: {1}", self, exc) + with self: + # Generate dummy responses for all outstanding requests. + err_message = str(exc) + + # Response._parse() will remove items from _sent_requests, so + # make a snapshot before iterating. + sent_requests = list(self._sent_requests.values()) + + for request in sent_requests: + response_json = MessageDict( + None, + { + "seq": -1, + "request_seq": request.seq, + "command": request.command, + "success": False, + "message": err_message, + }, + ) + Response._parse(self, response_json, body=exc) + assert not len(self._sent_requests) + + self._enqueue_handlers(Disconnect(self), self._handle_disconnect) + self.close() + + _message_parsers = { + "event": Event._parse, + "request": Request._parse, + "response": Response._parse, + } + + def _parse_incoming_message(self): + """Reads incoming messages, parses them, and puts handlers into the queue + for _run_handlers() to invoke, until the channel is closed. + """ + + # Set up a dedicated decoder for this message, to create MessageDict instances + # for all JSON objects, and track them so that they can be later wired up to + # the Message they belong to, once it is instantiated. + def object_hook(d): + d = MessageDict(None, d) + if "seq" in d: + self._prettify(d) + d.associate_with = associate_with + message_dicts.append(d) + return d + + # A hack to work around circular dependency between messages, and instances of + # MessageDict in their payload. We need to set message for all of them, but it + # cannot be done until the actual Message is created - which happens after the + # dicts are created during deserialization. + # + # So, upon deserialization, every dict in the message payload gets a method + # that can be called to set MessageDict.message for *all* dicts belonging to + # that message. This method can then be invoked on the top-level dict by the + # parser, after it has parsed enough of the dict to create the appropriate + # instance of Event, Request, or Response for this message. + def associate_with(message): + for d in message_dicts: + d.message = message + del d.associate_with + + message_dicts = [] + decoder = self.stream.json_decoder_factory(object_hook=object_hook) + message_dict = self.stream.read_json(decoder) + assert isinstance(message_dict, MessageDict) # make sure stream used decoder + + msg_type = message_dict("type", json.enum("event", "request", "response")) + parser = self._message_parsers[msg_type] + try: + parser(self, message_dict) + except InvalidMessageError as exc: + log.error( + "Failed to parse message in channel {0}: {1} in:\n{2}", + self, + str(exc), + json.repr(message_dict), + ) + except Exception as exc: + if isinstance(exc, NoMoreMessages) and exc.stream is self.stream: + raise + log.swallow_exception( + "Fatal error in channel {0} while parsing:\n{1}", + self, + json.repr(message_dict), + ) + os._exit(1) + + def _enqueue_handlers(self, what, *handlers): + """Enqueues handlers for _run_handlers() to run. + + `what` is the Message being handled, and is used for logging purposes. + + If the background thread with _run_handlers() isn't running yet, starts it. 
+ """ + + with self: + self._handler_queue.extend((what, handler) for handler in handlers) + self._handlers_enqueued.notify_all() + + # If there is anything to handle, but there's no handler thread yet, + # spin it up. This will normally happen only once, on the first call + # to _enqueue_handlers(), and that thread will run all the handlers + # for parsed messages. However, this can also happen is somebody calls + # Request.on_response() - possibly concurrently from multiple threads - + # after the channel has already been closed, and the initial handler + # thread has exited. In this case, we spin up a new thread just to run + # the enqueued response handlers, and it will exit as soon as it's out + # of handlers to run. + if len(self._handler_queue) and self._handler_thread is None: + self._handler_thread = threading.Thread( + target=self._run_handlers, + name=f"{self} message handler", + ) + hide_thread_from_debugger(self._handler_thread) + self._handler_thread.start() + + def _run_handlers(self): + """Runs enqueued handlers until the channel is closed, or until the handler + queue is empty once the channel is closed. + """ + + while True: + with self: + closed = self._closed + if closed: + # Wait for the parser thread to wrap up and enqueue any remaining + # handlers, if it is still running. + self._parser_thread.join() + # From this point on, _enqueue_handlers() can only get called + # from Request.on_response(). + + with self: + if not closed and not len(self._handler_queue): + # Wait for something to process. + self._handlers_enqueued.wait() + + # Make a snapshot before releasing the lock. + handlers = self._handler_queue[:] + del self._handler_queue[:] + + if closed and not len(handlers): + # Nothing to process, channel is closed, and parser thread is + # not running anymore - time to quit! If Request.on_response() + # needs to call _enqueue_handlers() later, it will spin up + # a new handler thread. + self._handler_thread = None + return + + for what, handler in handlers: + # If the channel is closed, we don't want to process any more events + # or requests - only responses and the final disconnect handler. This + # is to guarantee that if a handler calls close() on its own channel, + # the corresponding request or event is the last thing to be processed. + if closed and handler in (Event._handle, Request._handle): + continue + + with log.prefixed("/handling {0}/\n", what.describe()): + try: + handler() + except Exception: + # It's already logged by the handler, so just fail fast. + self.close() + os._exit(1) + + def _get_handler_for(self, type, name): + """Returns the handler for a message of a given type.""" + + with self: + handlers = self.handlers + + for handler_name in (name + "_" + type, type): + try: + return getattr(handlers, handler_name) + except AttributeError: + continue + + raise AttributeError( + "handler object {0} for channel {1} has no handler for {2} {3!r}".format( + util.srcnameof(handlers), + self, + type, + name, + ) + ) + + def _handle_disconnect(self): + handler = getattr(self.handlers, "disconnect", lambda: None) + try: + handler() + except Exception: + log.reraise_exception( + "Handler {0}\ncouldn't handle disconnect from {1}:", + util.srcnameof(handler), + self, + ) + + +class MessageHandlers(object): + """A simple delegating message handlers object for use with JsonMessageChannel. + For every argument provided, the object gets an attribute with the corresponding + name and value. 
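    For example (a hedged sketch; the command and event names are only
    illustrative), handlers can be supplied as plain callables without
    defining a class:

        handlers = MessageHandlers(
            initialize_request=lambda request: {"supportsConfigurationDoneRequest": True},
            exited_event=lambda event: print("debuggee exited:", event.body),
            disconnect=lambda: print("channel disconnected"),
        )
        channel = JsonMessageChannel(stream, handlers)
        channel.start()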
+ """ + + def __init__(self, **kwargs): + for name, func in kwargs.items(): + setattr(self, name, func) diff --git a/venv/lib/python3.8/site-packages/debugpy/common/singleton.py b/venv/lib/python3.8/site-packages/debugpy/common/singleton.py new file mode 100644 index 0000000..d515a4a --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/singleton.py @@ -0,0 +1,185 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import functools +import threading + + +class Singleton(object): + """A base class for a class of a singleton object. + + For any derived class T, the first invocation of T() will create the instance, + and any future invocations of T() will return that instance. + + Concurrent invocations of T() from different threads are safe. + """ + + # A dual-lock scheme is necessary to be thread safe while avoiding deadlocks. + # _lock_lock is shared by all singleton types, and is used to construct their + # respective _lock instances when invoked for a new type. Then _lock is used + # to synchronize all further access for that type, including __init__. This way, + # __init__ for any given singleton can access another singleton, and not get + # deadlocked if that other singleton is trying to access it. + _lock_lock = threading.RLock() + _lock = None + + # Specific subclasses will get their own _instance set in __new__. + _instance = None + + _is_shared = None # True if shared, False if exclusive + + def __new__(cls, *args, **kwargs): + # Allow arbitrary args and kwargs if shared=False, because that is guaranteed + # to construct a new singleton if it succeeds. Otherwise, this call might end + # up returning an existing instance, which might have been constructed with + # different arguments, so allowing them is misleading. + assert not kwargs.get("shared", False) or (len(args) + len(kwargs)) == 0, ( + "Cannot use constructor arguments when accessing a Singleton without " + "specifying shared=False." + ) + + # Avoid locking as much as possible with repeated double-checks - the most + # common path is when everything is already allocated. + if not cls._instance: + # If there's no per-type lock, allocate it. + if cls._lock is None: + with cls._lock_lock: + if cls._lock is None: + cls._lock = threading.RLock() + + # Now that we have a per-type lock, we can synchronize construction. + if not cls._instance: + with cls._lock: + if not cls._instance: + cls._instance = object.__new__(cls) + # To prevent having __init__ invoked multiple times, call + # it here directly, and then replace it with a stub that + # does nothing - that stub will get auto-invoked on return, + # and on all future singleton accesses. + cls._instance.__init__() + cls.__init__ = lambda *args, **kwargs: None + + return cls._instance + + def __init__(self, *args, **kwargs): + """Initializes the singleton instance. Guaranteed to only be invoked once for + any given type derived from Singleton. + + If shared=False, the caller is requesting a singleton instance for their own + exclusive use. This is only allowed if the singleton has not been created yet; + if so, it is created and marked as being in exclusive use. While it is marked + as such, all attempts to obtain an existing instance of it immediately raise + an exception. The singleton can eventually be promoted to shared use by calling + share() on it. 
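        For example (a hedged sketch of the contract described above; the class
        name is illustrative):

            class Config(Singleton):
                pass

            exclusive = Config(shared=False)  # create the instance for exclusive use
            ...                               # while exclusive, other callers cannot obtain it
            exclusive.share()                 # promote to shared use
            assert Config() is exclusive      # any later caller now gets the same instance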
+ """ + + shared = kwargs.pop("shared", True) + with self: + if shared: + assert ( + type(self)._is_shared is not False + ), "Cannot access a non-shared Singleton." + type(self)._is_shared = True + else: + assert type(self)._is_shared is None, "Singleton is already created." + + def __enter__(self): + """Lock this singleton to prevent concurrent access.""" + type(self)._lock.acquire() + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + """Unlock this singleton to allow concurrent access.""" + type(self)._lock.release() + + def share(self): + """Share this singleton, if it was originally created with shared=False.""" + type(self)._is_shared = True + + +class ThreadSafeSingleton(Singleton): + """A singleton that incorporates a lock for thread-safe access to its members. + + The lock can be acquired using the context manager protocol, and thus idiomatic + use is in conjunction with a with-statement. For example, given derived class T:: + + with T() as t: + t.x = t.frob(t.y) + + All access to the singleton from the outside should follow this pattern for both + attributes and method calls. Singleton members can assume that self is locked by + the caller while they're executing, but recursive locking of the same singleton + on the same thread is also permitted. + """ + + threadsafe_attrs = frozenset() + """Names of attributes that are guaranteed to be used in a thread-safe manner. + + This is typically used in conjunction with share() to simplify synchronization. + """ + + readonly_attrs = frozenset() + """Names of attributes that are readonly. These can be read without locking, but + cannot be written at all. + + Every derived class gets its own separate set. Thus, for any given singleton type + T, an attribute can be made readonly after setting it, with T.readonly_attrs.add(). + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Make sure each derived class gets a separate copy. + type(self).readonly_attrs = set(type(self).readonly_attrs) + + # Prevent callers from reading or writing attributes without locking, except for + # reading attributes listed in threadsafe_attrs, and methods specifically marked + # with @threadsafe_method. Such methods should perform the necessary locking to + # ensure thread safety for the callers. + + @staticmethod + def assert_locked(self): + lock = type(self)._lock + assert lock.acquire(blocking=False), ( + "ThreadSafeSingleton accessed without locking. Either use with-statement, " + "or if it is a method or property, mark it as @threadsafe_method or with " + "@autolocked_method, as appropriate." + ) + lock.release() + + def __getattribute__(self, name): + value = object.__getattribute__(self, name) + if name not in (type(self).threadsafe_attrs | type(self).readonly_attrs): + if not getattr(value, "is_threadsafe_method", False): + ThreadSafeSingleton.assert_locked(self) + return value + + def __setattr__(self, name, value): + assert name not in type(self).readonly_attrs, "This attribute is read-only." + if name not in type(self).threadsafe_attrs: + ThreadSafeSingleton.assert_locked(self) + return object.__setattr__(self, name, value) + + +def threadsafe_method(func): + """Marks a method of a ThreadSafeSingleton-derived class as inherently thread-safe. + + A method so marked must either not use any singleton state, or lock it appropriately. 
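    For example (a hedged sketch; the class and attribute names are illustrative):

        class Stats(ThreadSafeSingleton):
            count = 0

            @threadsafe_method
            def bump(self):
                with self:              # does its own locking
                    self.count += 1

            @autolocked_method
            def value(self):
                return self.count       # the lock is taken automatically around the call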
+ """ + + func.is_threadsafe_method = True + return func + + +def autolocked_method(func): + """Automatically synchronizes all calls of a method of a ThreadSafeSingleton-derived + class by locking the singleton for the duration of each call. + """ + + @functools.wraps(func) + @threadsafe_method + def lock_and_call(self, *args, **kwargs): + with self: + return func(self, *args, **kwargs) + + return lock_and_call diff --git a/venv/lib/python3.8/site-packages/debugpy/common/sockets.py b/venv/lib/python3.8/site-packages/debugpy/common/sockets.py new file mode 100644 index 0000000..e5f820d --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/sockets.py @@ -0,0 +1,122 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import socket +import sys +import threading + +from debugpy.common import log +from debugpy.common.util import hide_thread_from_debugger + + +def create_server(host, port=0, backlog=socket.SOMAXCONN, timeout=None): + """Return a local server socket listening on the given port.""" + + assert backlog > 0 + if host is None: + host = "127.0.0.1" + if port is None: + port = 0 + + try: + server = _new_sock() + server.bind((host, port)) + if timeout is not None: + server.settimeout(timeout) + server.listen(backlog) + except Exception: + server.close() + raise + return server + + +def create_client(): + """Return a client socket that may be connected to a remote address.""" + return _new_sock() + + +def _new_sock(): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) + if sys.platform == "win32": + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + # Set TCP keepalive on an open socket. + # It activates after 1 second (TCP_KEEPIDLE,) of idleness, + # then sends a keepalive ping once every 3 seconds (TCP_KEEPINTVL), + # and closes the connection after 5 failed ping (TCP_KEEPCNT), or 15 seconds + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + except (AttributeError, OSError): + pass # May not be available everywhere. + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1) + except (AttributeError, OSError): + pass # May not be available everywhere. + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3) + except (AttributeError, OSError): + pass # May not be available everywhere. + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5) + except (AttributeError, OSError): + pass # May not be available everywhere. + return sock + + +def shut_down(sock, how=socket.SHUT_RDWR): + """Shut down the given socket.""" + sock.shutdown(how) + + +def close_socket(sock): + """Shutdown and close the socket.""" + try: + shut_down(sock) + except Exception: + pass + sock.close() + + +def serve(name, handler, host, port=0, backlog=socket.SOMAXCONN, timeout=None): + """Accepts TCP connections on the specified host and port, and invokes the + provided handler function for every new connection. + + Returns the created server socket. 
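    For example (a hedged sketch; the service name and the use of JsonIOStream
    are illustrative):

        def on_connection(sock):
            stream = messaging.JsonIOStream.from_socket(sock, "incoming")
            ...  # hand the stream to a JsonMessageChannel, etc.

        listener = serve("example", on_connection, "127.0.0.1", 0)
        _, chosen_port = listener.getsockname()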
+ """ + + assert backlog > 0 + + try: + listener = create_server(host, port, backlog, timeout) + except Exception: + log.reraise_exception( + "Error listening for incoming {0} connections on {1}:{2}:", name, host, port + ) + host, port = listener.getsockname() + log.info("Listening for incoming {0} connections on {1}:{2}...", name, host, port) + + def accept_worker(): + while True: + try: + sock, (other_host, other_port) = listener.accept() + except (OSError, socket.error): + # Listener socket has been closed. + break + + log.info( + "Accepted incoming {0} connection from {1}:{2}.", + name, + other_host, + other_port, + ) + handler(sock) + + thread = threading.Thread(target=accept_worker) + thread.daemon = True + hide_thread_from_debugger(thread) + thread.start() + + return listener diff --git a/venv/lib/python3.8/site-packages/debugpy/common/stacks.py b/venv/lib/python3.8/site-packages/debugpy/common/stacks.py new file mode 100644 index 0000000..a7bd16f --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/stacks.py @@ -0,0 +1,62 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +"""Provides facilities to dump all stacks of all threads in the process. +""" + +import os +import sys +import time +import threading +import traceback + +from debugpy.common import log + + +def dump(): + """Dump stacks of all threads in this process, except for the current thread.""" + + tid = threading.current_thread().ident + pid = os.getpid() + + log.info("Dumping stacks for process {0}...", pid) + + for t_ident, frame in sys._current_frames().items(): + if t_ident == tid: + continue + + for t in threading.enumerate(): + if t.ident == tid: + t_name = t.name + t_daemon = t.daemon + break + else: + t_name = t_daemon = "" + + stack = "".join(traceback.format_stack(frame)) + log.info( + "Stack of thread {0} (tid={1}, pid={2}, daemon={3}):\n\n{4}", + t_name, + t_ident, + pid, + t_daemon, + stack, + ) + + log.info("Finished dumping stacks for process {0}.", pid) + + +def dump_after(secs): + """Invokes dump() on a background thread after waiting for the specified time.""" + + def dumper(): + time.sleep(secs) + try: + dump() + except: + log.swallow_exception() + + thread = threading.Thread(target=dumper) + thread.daemon = True + thread.start() diff --git a/venv/lib/python3.8/site-packages/debugpy/common/timestamp.py b/venv/lib/python3.8/site-packages/debugpy/common/timestamp.py new file mode 100644 index 0000000..2913b60 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/timestamp.py @@ -0,0 +1,22 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +"""Provides monotonic timestamps with a resetable zero. +""" + +import time + +__all__ = ["current", "reset"] + + +def current(): + return time.monotonic() - timestamp_zero + + +def reset(): + global timestamp_zero + timestamp_zero = time.monotonic() + + +reset() diff --git a/venv/lib/python3.8/site-packages/debugpy/common/util.py b/venv/lib/python3.8/site-packages/debugpy/common/util.py new file mode 100644 index 0000000..54850a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/common/util.py @@ -0,0 +1,164 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. 
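A hedged sketch of how the stacks and timestamp helpers added above might be used (the import paths match this tree; the five-second delay is arbitrary):

    from debugpy.common import stacks, timestamp

    timestamp.reset()          # re-zero the monotonic clock
    stacks.dump_after(5)       # log all thread stacks five seconds from now
    print("elapsed:", timestamp.current())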
+ +import inspect +import os +import sys + + +def evaluate(code, path=__file__, mode="eval"): + # Setting file path here to avoid breaking here if users have set + # "break on exception raised" setting. This code can potentially run + # in user process and is indistinguishable if the path is not set. + # We use the path internally to skip exception inside the debugger. + expr = compile(code, path, "eval") + return eval(expr, {}, sys.modules) + + +class Observable(object): + """An object with change notifications.""" + + observers = () # used when attributes are set before __init__ is invoked + + def __init__(self): + self.observers = [] + + def __setattr__(self, name, value): + try: + return super().__setattr__(name, value) + finally: + for ob in self.observers: + ob(self, name) + + +class Env(dict): + """A dict for environment variables.""" + + @staticmethod + def snapshot(): + """Returns a snapshot of the current environment.""" + return Env(os.environ) + + def copy(self, updated_from=None): + result = Env(self) + if updated_from is not None: + result.update(updated_from) + return result + + def prepend_to(self, key, entry): + """Prepends a new entry to a PATH-style environment variable, creating + it if it doesn't exist already. + """ + try: + tail = os.path.pathsep + self[key] + except KeyError: + tail = "" + self[key] = entry + tail + + +def force_str(s, encoding, errors="strict"): + """Converts s to str, using the provided encoding. If s is already str, + it is returned as is. + """ + return s.decode(encoding, errors) if isinstance(s, bytes) else str(s) + + +def force_bytes(s, encoding, errors="strict"): + """Converts s to bytes, using the provided encoding. If s is already bytes, + it is returned as is. + + If errors="strict" and s is bytes, its encoding is verified by decoding it; + UnicodeError is raised if it cannot be decoded. + """ + if isinstance(s, str): + return s.encode(encoding, errors) + else: + s = bytes(s) + if errors == "strict": + # Return value ignored - invoked solely for verification. + s.decode(encoding, errors) + return s + + +def force_ascii(s, errors="strict"): + """Same as force_bytes(s, "ascii", errors)""" + return force_bytes(s, "ascii", errors) + + +def force_utf8(s, errors="strict"): + """Same as force_bytes(s, "utf8", errors)""" + return force_bytes(s, "utf8", errors) + + +def nameof(obj, quote=False): + """Returns the most descriptive name of a Python module, class, or function, + as a Unicode string + + If quote=True, name is quoted with repr(). + + Best-effort, but guaranteed to not fail - always returns something. + """ + + try: + name = obj.__qualname__ + except Exception: + try: + name = obj.__name__ + except Exception: + # Fall back to raw repr(), and skip quoting. + try: + name = repr(obj) + except Exception: + return "" + else: + quote = False + + if quote: + try: + name = repr(name) + except Exception: + pass + + return force_str(name, "utf-8", "replace") + + +def srcnameof(obj): + """Returns the most descriptive name of a Python module, class, or function, + including source information (filename and linenumber), if available. + + Best-effort, but guaranteed to not fail - always returns something. + """ + + name = nameof(obj, quote=True) + + # Get the source information if possible. 
+ try: + src_file = inspect.getsourcefile(obj) + except Exception: + pass + else: + name += f" (file {src_file!r}" + try: + _, src_lineno = inspect.getsourcelines(obj) + except Exception: + pass + else: + name += f", line {src_lineno}" + name += ")" + + return name + + +def hide_debugpy_internals(): + """Returns True if the caller should hide something from debugpy.""" + return "DEBUGPY_TRACE_DEBUGPY" not in os.environ + + +def hide_thread_from_debugger(thread): + """Disables tracing for the given thread if DEBUGPY_TRACE_DEBUGPY is not set. + DEBUGPY_TRACE_DEBUGPY is used to debug debugpy with debugpy + """ + if hide_debugpy_internals(): + thread.pydev_do_not_trace = True + thread.is_pydev_daemon_thread = True diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__init__.py b/venv/lib/python3.8/site-packages/debugpy/launcher/__init__.py new file mode 100644 index 0000000..a6e0934 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/launcher/__init__.py @@ -0,0 +1,32 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +__all__ = [] + + +adapter_host = None +"""The host on which adapter is running and listening for incoming connections +from the launcher and the servers.""" + +channel = None +"""DAP message channel to the adapter.""" + + +def connect(host, port): + from debugpy.common import log, messaging, sockets + from debugpy.launcher import handlers + + global channel, adapter_host + assert channel is None + assert adapter_host is None + + log.info("Connecting to adapter at {0}:{1}", host, port) + + sock = sockets.create_client() + sock.connect((host, port)) + adapter_host = host + + stream = messaging.JsonIOStream.from_socket(sock, "Adapter") + channel = messaging.JsonMessageChannel(stream, handlers=handlers) + channel.start() diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__main__.py b/venv/lib/python3.8/site-packages/debugpy/launcher/__main__.py new file mode 100644 index 0000000..cff18b5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/launcher/__main__.py @@ -0,0 +1,91 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +__all__ = ["main"] + +import locale +import signal +import sys + +# WARNING: debugpy and submodules must not be imported on top level in this module, +# and should be imported locally inside main() instead. + + +def main(): + from debugpy import launcher + from debugpy.common import log + from debugpy.launcher import debuggee + + log.to_file(prefix="debugpy.launcher") + log.describe_environment("debugpy.launcher startup environment:") + + if sys.platform == "win32": + # For windows, disable exceptions on Ctrl+C - we want to allow the debuggee + # process to handle these, or not, as it sees fit. If the debuggee exits + # on Ctrl+C, the launcher will also exit, so it doesn't need to observe + # the signal directly. + signal.signal(signal.SIGINT, signal.SIG_IGN) + + # Everything before "--" is command line arguments for the launcher itself, + # and everything after "--" is command line arguments for the debuggee. 
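    # For example (a hypothetical invocation, shown only for illustration):
    #
    #   python -m debugpy.launcher 127.0.0.1:5678 -- myscript.py --verbose
    #
    # leaves ["127.0.0.1:5678"] as the launcher's own arguments, and rewrites
    # sys.argv so that the debuggee sees ["<launcher path>", "myscript.py", "--verbose"].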
+ log.info("sys.argv before parsing: {0}", sys.argv) + sep = sys.argv.index("--") + launcher_argv = sys.argv[1:sep] + sys.argv[:] = [sys.argv[0]] + sys.argv[sep + 1 :] + log.info("sys.argv after patching: {0}", sys.argv) + + # The first argument specifies the host/port on which the adapter is waiting + # for launcher to connect. It's either host:port, or just port. + adapter = launcher_argv[0] + host, sep, port = adapter.partition(":") + if not sep: + host = "127.0.0.1" + port = adapter + port = int(port) + + launcher.connect(host, port) + launcher.channel.wait() + + if debuggee.process is not None: + sys.exit(debuggee.process.returncode) + + +if __name__ == "__main__": + # debugpy can also be invoked directly rather than via -m. In this case, the first + # entry on sys.path is the one added automatically by Python for the directory + # containing this file. This means that import debugpy will not work, since we need + # the parent directory of debugpy/ to be in sys.path, rather than debugpy/launcher/. + # + # The other issue is that many other absolute imports will break, because they + # will be resolved relative to debugpy/launcher/ - e.g. `import state` will then try + # to import debugpy/launcher/state.py. + # + # To fix both, we need to replace the automatically added entry such that it points + # at parent directory of debugpy/ instead of debugpy/launcher, import debugpy with that + # in sys.path, and then remove the first entry entry altogether, so that it doesn't + # affect any further imports we might do. For example, suppose the user did: + # + # python /foo/bar/debugpy/launcher ... + # + # At the beginning of this script, sys.path will contain "/foo/bar/debugpy/launcher" + # as the first entry. What we want is to replace it with "/foo/bar', then import + # debugpy with that in effect, and then remove the replaced entry before any more + # code runs. The imported debugpy module will remain in sys.modules, and thus all + # future imports of it or its submodules will resolve accordingly. + if "debugpy" not in sys.modules: + # Do not use dirname() to walk up - this can be a relative path, e.g. ".". + sys.path[0] = sys.path[0] + "/../../" + __import__("debugpy") + del sys.path[0] + + # Apply OS-global and user-specific locale settings. + try: + locale.setlocale(locale.LC_ALL, "") + except Exception: + # On POSIX, locale is set via environment variables, and this can fail if + # those variables reference a non-existing locale. Ignore and continue using + # the default "C" locale if so. 
+ pass + + main() diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4b7dd9f Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/__main__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..503deed Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/debuggee.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/debuggee.cpython-38.pyc new file mode 100644 index 0000000..ca1c0f1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/debuggee.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/handlers.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/handlers.cpython-38.pyc new file mode 100644 index 0000000..cf087ef Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/handlers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/output.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/output.cpython-38.pyc new file mode 100644 index 0000000..f130acc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/output.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/winapi.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/winapi.cpython-38.pyc new file mode 100644 index 0000000..a624acc Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/launcher/__pycache__/winapi.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/debuggee.py b/venv/lib/python3.8/site-packages/debugpy/launcher/debuggee.py new file mode 100644 index 0000000..2d85288 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/launcher/debuggee.py @@ -0,0 +1,249 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import atexit +import ctypes +import os +import signal +import struct +import subprocess +import sys +import threading + +from debugpy import launcher +from debugpy.common import log, messaging +from debugpy.launcher import output + +if sys.platform == "win32": + from debugpy.launcher import winapi + + +process = None +"""subprocess.Popen instance for the debuggee process.""" + +job_handle = None +"""On Windows, the handle for the job object to which the debuggee is assigned.""" + +wait_on_exit_predicates = [] +"""List of functions that determine whether to pause after debuggee process exits. + +Every function is invoked with exit code as the argument. If any of the functions +returns True, the launcher pauses and waits for user input before exiting. 
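For example (this mirrors how handlers.py registers a predicate for the "waitOnAbnormalExit" debug option):

    wait_on_exit_predicates.append(lambda code: code != 0)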
+""" + + +def describe(): + return f"Debuggee[PID={process.pid}]" + + +def spawn(process_name, cmdline, env, redirect_output): + log.info( + "Spawning debuggee process:\n\n" + "Command line: {0!r}\n\n" + "Environment variables: {1!r}\n\n", + cmdline, + env, + ) + + close_fds = set() + try: + if redirect_output: + # subprocess.PIPE behavior can vary substantially depending on Python version + # and platform; using our own pipes keeps it simple, predictable, and fast. + stdout_r, stdout_w = os.pipe() + stderr_r, stderr_w = os.pipe() + close_fds |= {stdout_r, stdout_w, stderr_r, stderr_w} + kwargs = dict(stdout=stdout_w, stderr=stderr_w) + else: + kwargs = {} + + if sys.platform != "win32": + + def preexec_fn(): + try: + # Start the debuggee in a new process group, so that the launcher can + # kill the entire process tree later. + os.setpgrp() + + # Make the new process group the foreground group in its session, so + # that it can interact with the terminal. The debuggee will receive + # SIGTTOU when tcsetpgrp() is called, and must ignore it. + old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN) + try: + tty = os.open("/dev/tty", os.O_RDWR) + try: + os.tcsetpgrp(tty, os.getpgrp()) + finally: + os.close(tty) + finally: + signal.signal(signal.SIGTTOU, old_handler) + except Exception: + # Not an error - /dev/tty doesn't work when there's no terminal. + log.swallow_exception( + "Failed to set up process group", level="info" + ) + + kwargs.update(preexec_fn=preexec_fn) + + try: + global process + process = subprocess.Popen(cmdline, env=env, bufsize=0, **kwargs) + except Exception as exc: + raise messaging.MessageHandlingError( + "Couldn't spawn debuggee: {0}\n\nCommand line:{1!r}".format( + exc, cmdline + ) + ) + + log.info("Spawned {0}.", describe()) + + if sys.platform == "win32": + # Assign the debuggee to a new job object, so that the launcher can kill + # the entire process tree later. + try: + global job_handle + job_handle = winapi.kernel32.CreateJobObjectA(None, None) + + job_info = winapi.JOBOBJECT_EXTENDED_LIMIT_INFORMATION() + job_info_size = winapi.DWORD(ctypes.sizeof(job_info)) + winapi.kernel32.QueryInformationJobObject( + job_handle, + winapi.JobObjectExtendedLimitInformation, + ctypes.pointer(job_info), + job_info_size, + ctypes.pointer(job_info_size), + ) + + job_info.BasicLimitInformation.LimitFlags |= ( + # Ensure that the job will be terminated by the OS once the + # launcher exits, even if it doesn't terminate the job explicitly. + winapi.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE + | + # Allow the debuggee to create its own jobs unrelated to ours. 
+ winapi.JOB_OBJECT_LIMIT_BREAKAWAY_OK + ) + winapi.kernel32.SetInformationJobObject( + job_handle, + winapi.JobObjectExtendedLimitInformation, + ctypes.pointer(job_info), + job_info_size, + ) + + process_handle = winapi.kernel32.OpenProcess( + winapi.PROCESS_TERMINATE | winapi.PROCESS_SET_QUOTA, + False, + process.pid, + ) + + winapi.kernel32.AssignProcessToJobObject(job_handle, process_handle) + + except Exception: + log.swallow_exception("Failed to set up job object", level="warning") + + atexit.register(kill) + + launcher.channel.send_event( + "process", + { + "startMethod": "launch", + "isLocalProcess": True, + "systemProcessId": process.pid, + "name": process_name, + "pointerSize": struct.calcsize("P") * 8, + }, + ) + + if redirect_output: + for category, fd, tee in [ + ("stdout", stdout_r, sys.stdout), + ("stderr", stderr_r, sys.stderr), + ]: + output.CaptureOutput(describe(), category, fd, tee) + close_fds.remove(fd) + + wait_thread = threading.Thread(target=wait_for_exit, name="wait_for_exit()") + wait_thread.daemon = True + wait_thread.start() + + finally: + for fd in close_fds: + try: + os.close(fd) + except Exception: + log.swallow_exception(level="warning") + + +def kill(): + if process is None: + return + + try: + if process.poll() is None: + log.info("Killing {0}", describe()) + # Clean up the process tree + if sys.platform == "win32": + # On Windows, kill the job object. + winapi.kernel32.TerminateJobObject(job_handle, 0) + else: + # On POSIX, kill the debuggee's process group. + os.killpg(process.pid, signal.SIGKILL) + except Exception: + log.swallow_exception("Failed to kill {0}", describe()) + + +def wait_for_exit(): + try: + code = process.wait() + if sys.platform != "win32" and code < 0: + # On POSIX, if the process was terminated by a signal, Popen will use + # a negative returncode to indicate that - but the actual exit code of + # the process is always an unsigned number, and can be determined by + # taking the lowest 8 bits of that negative returncode. + code &= 0xFF + except Exception: + log.swallow_exception("Couldn't determine process exit code") + code = -1 + + log.info("{0} exited with code {1}", describe(), code) + output.wait_for_remaining_output() + + # Determine whether we should wait or not before sending "exited", so that any + # follow-up "terminate" requests don't affect the predicates. + should_wait = any(pred(code) for pred in wait_on_exit_predicates) + + try: + launcher.channel.send_event("exited", {"exitCode": code}) + except Exception: + pass + + if should_wait: + _wait_for_user_input() + + try: + launcher.channel.send_event("terminated") + except Exception: + pass + + +def _wait_for_user_input(): + if sys.stdout and sys.stdin and sys.stdin.isatty(): + from debugpy.common import log + + try: + import msvcrt + except ImportError: + can_getch = False + else: + can_getch = True + + if can_getch: + log.debug("msvcrt available - waiting for user input via getch()") + sys.stdout.write("Press any key to continue . . . ") + sys.stdout.flush() + msvcrt.getch() + else: + log.debug("msvcrt not available - waiting for user input via read()") + sys.stdout.write("Press Enter to continue . . . ") + sys.stdout.flush() + sys.stdin.read(1) diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/handlers.py b/venv/lib/python3.8/site-packages/debugpy/launcher/handlers.py new file mode 100644 index 0000000..213a5b9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/launcher/handlers.py @@ -0,0 +1,152 @@ +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import os +import sys + +import debugpy +from debugpy import launcher +from debugpy.common import json +from debugpy.launcher import debuggee + + +def launch_request(request): + debug_options = set(request("debugOptions", json.array(str))) + + # Handling of properties that can also be specified as legacy "debugOptions" flags. + # If property is explicitly set to false, but the flag is in "debugOptions", treat + # it as an error. Returns None if the property wasn't explicitly set either way. + def property_or_debug_option(prop_name, flag_name): + assert prop_name[0].islower() and flag_name[0].isupper() + + value = request(prop_name, bool, optional=True) + if value == (): + value = None + + if flag_name in debug_options: + if value is False: + raise request.isnt_valid( + '{0}:false and "debugOptions":[{1}] are mutually exclusive', + json.repr(prop_name), + json.repr(flag_name), + ) + value = True + + return value + + python = request("python", json.array(str, size=(1,))) + cmdline = list(python) + + if not request("noDebug", json.default(False)): + # see https://github.com/microsoft/debugpy/issues/861 + if sys.version_info[:2] >= (3, 11): + cmdline += ["-X", "frozen_modules=off"] + + port = request("port", int) + cmdline += [ + os.path.dirname(debugpy.__file__), + "--connect", + launcher.adapter_host + ":" + str(port), + ] + + if not request("subProcess", True): + cmdline += ["--configure-subProcess", "False"] + + qt_mode = request( + "qt", + json.enum( + "none", "auto", "pyside", "pyside2", "pyqt4", "pyqt5", optional=True + ), + ) + cmdline += ["--configure-qt", qt_mode] + + adapter_access_token = request("adapterAccessToken", str, optional=True) + if adapter_access_token != (): + cmdline += ["--adapter-access-token", adapter_access_token] + + debugpy_args = request("debugpyArgs", json.array(str)) + cmdline += debugpy_args + + # Use the copy of arguments that was propagated via the command line rather than + # "args" in the request itself, to allow for shell expansion. + cmdline += sys.argv[1:] + + process_name = request("processName", sys.executable) + + env = os.environ.copy() + env_changes = request("env", json.object((str, type(None)))) + if sys.platform == "win32": + # Environment variables are case-insensitive on Win32, so we need to normalize + # both dicts to make sure that env vars specified in the debug configuration + # overwrite the global env vars correctly. If debug config has entries that + # differ in case only, that's an error. + env = {k.upper(): v for k, v in os.environ.items()} + new_env_changes = {} + for k, v in env_changes.items(): + k_upper = k.upper() + if k_upper in new_env_changes: + if new_env_changes[k_upper] == v: + continue + else: + raise request.isnt_valid( + 'Found duplicate in "env": {0}.'.format(k_upper) + ) + new_env_changes[k_upper] = v + env_changes = new_env_changes + if "DEBUGPY_TEST" in env: + # If we're running as part of a debugpy test, make sure that codecov is not + # applied to the debuggee, since it will conflict with pydevd. 
+ env.pop("COV_CORE_SOURCE", None) + env.update(env_changes) + env = {k: v for k, v in env.items() if v is not None} + + if request("gevent", False): + env["GEVENT_SUPPORT"] = "True" + + console = request( + "console", + json.enum( + "internalConsole", "integratedTerminal", "externalTerminal", optional=True + ), + ) + + redirect_output = property_or_debug_option("redirectOutput", "RedirectOutput") + if redirect_output is None: + # If neither the property nor the option were specified explicitly, choose + # the default depending on console type - "internalConsole" needs it to + # provide any output at all, but it's unnecessary for the terminals. + redirect_output = console == "internalConsole" + if redirect_output: + # sys.stdout buffering must be disabled - otherwise we won't see the output + # at all until the buffer fills up. + env["PYTHONUNBUFFERED"] = "1" + # Force UTF-8 output to minimize data loss due to re-encoding. + env["PYTHONIOENCODING"] = "utf-8" + + if property_or_debug_option("waitOnNormalExit", "WaitOnNormalExit"): + if console == "internalConsole": + raise request.isnt_valid( + '"waitOnNormalExit" is not supported for "console":"internalConsole"' + ) + debuggee.wait_on_exit_predicates.append(lambda code: code == 0) + if property_or_debug_option("waitOnAbnormalExit", "WaitOnAbnormalExit"): + if console == "internalConsole": + raise request.isnt_valid( + '"waitOnAbnormalExit" is not supported for "console":"internalConsole"' + ) + debuggee.wait_on_exit_predicates.append(lambda code: code != 0) + + debuggee.spawn(process_name, cmdline, env, redirect_output) + return {} + + +def terminate_request(request): + del debuggee.wait_on_exit_predicates[:] + request.respond({}) + debuggee.kill() + + +def disconnect(): + del debuggee.wait_on_exit_predicates[:] + debuggee.kill() diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/output.py b/venv/lib/python3.8/site-packages/debugpy/launcher/output.py new file mode 100644 index 0000000..70cd521 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/launcher/output.py @@ -0,0 +1,113 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import codecs +import os +import threading + +from debugpy import launcher +from debugpy.common import log + + +class CaptureOutput(object): + """Captures output from the specified file descriptor, and tees it into another + file descriptor while generating DAP "output" events for it. + """ + + instances = {} + """Keys are output categories, values are CaptureOutput instances.""" + + def __init__(self, whose, category, fd, stream): + assert category not in self.instances + self.instances[category] = self + log.info("Capturing {0} of {1}.", category, whose) + + self.category = category + self._whose = whose + self._fd = fd + self._decoder = codecs.getincrementaldecoder("utf-8")(errors="surrogateescape") + + if stream is None: + # Can happen if running under pythonw.exe. 
+ self._stream = None + else: + self._stream = stream.buffer + encoding = stream.encoding + if encoding is None or encoding == "cp65001": + encoding = "utf-8" + try: + self._encode = codecs.getencoder(encoding) + except Exception: + log.swallow_exception( + "Unsupported {0} encoding {1!r}; falling back to UTF-8.", + category, + encoding, + level="warning", + ) + self._encode = codecs.getencoder("utf-8") + else: + log.info("Using encoding {0!r} for {1}", encoding, category) + + self._worker_thread = threading.Thread(target=self._worker, name=category) + self._worker_thread.start() + + def __del__(self): + fd = self._fd + if fd is not None: + try: + os.close(fd) + except Exception: + pass + + def _worker(self): + while self._fd is not None: + try: + s = os.read(self._fd, 0x1000) + except Exception: + break + if not len(s): + break + self._process_chunk(s) + + # Flush any remaining data in the incremental decoder. + self._process_chunk(b"", final=True) + + def _process_chunk(self, s, final=False): + s = self._decoder.decode(s, final=final) + if len(s) == 0: + return + + try: + launcher.channel.send_event( + "output", {"category": self.category, "output": s.replace("\r\n", "\n")} + ) + except Exception: + pass # channel to adapter is already closed + + if self._stream is None: + return + + try: + s, _ = self._encode(s, "surrogateescape") + size = len(s) + i = 0 + while i < size: + written = self._stream.write(s[i:]) + self._stream.flush() + if written == 0: + # This means that the output stream was closed from the other end. + # Do the same to the debuggee, so that it knows as well. + os.close(self._fd) + self._fd = None + break + i += written + except Exception: + log.swallow_exception("Error printing {0!r} to {1}", s, self.category) + + +def wait_for_remaining_output(): + """Waits for all remaining output to be captured and propagated.""" + for category, instance in CaptureOutput.instances.items(): + log.info("Waiting for remaining {0} of {1}.", category, instance._whose) + instance._worker_thread.join() diff --git a/venv/lib/python3.8/site-packages/debugpy/launcher/winapi.py b/venv/lib/python3.8/site-packages/debugpy/launcher/winapi.py new file mode 100644 index 0000000..a93dbc7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/launcher/winapi.py @@ -0,0 +1,104 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. 
+ +import ctypes +from ctypes.wintypes import BOOL, DWORD, HANDLE, LARGE_INTEGER, LPCSTR, UINT + +from debugpy.common import log + + +JOBOBJECTCLASS = ctypes.c_int +LPDWORD = ctypes.POINTER(DWORD) +LPVOID = ctypes.c_void_p +SIZE_T = ctypes.c_size_t +ULONGLONG = ctypes.c_ulonglong + + +class IO_COUNTERS(ctypes.Structure): + _fields_ = [ + ("ReadOperationCount", ULONGLONG), + ("WriteOperationCount", ULONGLONG), + ("OtherOperationCount", ULONGLONG), + ("ReadTransferCount", ULONGLONG), + ("WriteTransferCount", ULONGLONG), + ("OtherTransferCount", ULONGLONG), + ] + + +class JOBOBJECT_BASIC_LIMIT_INFORMATION(ctypes.Structure): + _fields_ = [ + ("PerProcessUserTimeLimit", LARGE_INTEGER), + ("PerJobUserTimeLimit", LARGE_INTEGER), + ("LimitFlags", DWORD), + ("MinimumWorkingSetSize", SIZE_T), + ("MaximumWorkingSetSize", SIZE_T), + ("ActiveProcessLimit", DWORD), + ("Affinity", SIZE_T), + ("PriorityClass", DWORD), + ("SchedulingClass", DWORD), + ] + + +class JOBOBJECT_EXTENDED_LIMIT_INFORMATION(ctypes.Structure): + _fields_ = [ + ("BasicLimitInformation", JOBOBJECT_BASIC_LIMIT_INFORMATION), + ("IoInfo", IO_COUNTERS), + ("ProcessMemoryLimit", SIZE_T), + ("JobMemoryLimit", SIZE_T), + ("PeakProcessMemoryUsed", SIZE_T), + ("PeakJobMemoryUsed", SIZE_T), + ] + + +JobObjectExtendedLimitInformation = JOBOBJECTCLASS(9) + +JOB_OBJECT_LIMIT_BREAKAWAY_OK = 0x00000800 +JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x00002000 + +PROCESS_TERMINATE = 0x0001 +PROCESS_SET_QUOTA = 0x0100 + + +def _errcheck(is_error_result=(lambda result: not result)): + def impl(result, func, args): + if is_error_result(result): + log.debug("{0} returned {1}", func.__name__, result) + raise ctypes.WinError() + else: + return result + + return impl + + +kernel32 = ctypes.windll.kernel32 + +kernel32.AssignProcessToJobObject.errcheck = _errcheck() +kernel32.AssignProcessToJobObject.restype = BOOL +kernel32.AssignProcessToJobObject.argtypes = (HANDLE, HANDLE) + +kernel32.CreateJobObjectA.errcheck = _errcheck(lambda result: result == 0) +kernel32.CreateJobObjectA.restype = HANDLE +kernel32.CreateJobObjectA.argtypes = (LPVOID, LPCSTR) + +kernel32.OpenProcess.errcheck = _errcheck(lambda result: result == 0) +kernel32.OpenProcess.restype = HANDLE +kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) + +kernel32.QueryInformationJobObject.errcheck = _errcheck() +kernel32.QueryInformationJobObject.restype = BOOL +kernel32.QueryInformationJobObject.argtypes = ( + HANDLE, + JOBOBJECTCLASS, + LPVOID, + DWORD, + LPDWORD, +) + +kernel32.SetInformationJobObject.errcheck = _errcheck() +kernel32.SetInformationJobObject.restype = BOOL +kernel32.SetInformationJobObject.argtypes = (HANDLE, JOBOBJECTCLASS, LPVOID, DWORD) + +kernel32.TerminateJobObject.errcheck = _errcheck() +kernel32.TerminateJobObject.restype = BOOL +kernel32.TerminateJobObject.argtypes = (HANDLE, UINT) diff --git a/venv/lib/python3.8/site-packages/debugpy/public_api.py b/venv/lib/python3.8/site-packages/debugpy/public_api.py new file mode 100644 index 0000000..3c80089 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/public_api.py @@ -0,0 +1,185 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. 
+ +from __future__ import annotations + +import functools +import typing + +from debugpy import _version + + +# Expose debugpy.server API from subpackage, but do not actually import it unless +# and until a member is invoked - we don't want the server package loaded in the +# adapter, the tests, or setup.py. + +# Docstrings for public API members must be formatted according to PEP 8 - no more +# than 72 characters per line! - and must be readable when retrieved via help(). + + +Endpoint = typing.Tuple[str, int] + + +def _api(cancelable=False): + def apply(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + from debugpy.server import api + + wrapped = getattr(api, f.__name__) + return wrapped(*args, **kwargs) + + if cancelable: + + def cancel(*args, **kwargs): + from debugpy.server import api + + wrapped = getattr(api, f.__name__) + return wrapped.cancel(*args, **kwargs) + + wrapper.cancel = cancel + + return wrapper + + return apply + + +@_api() +def log_to(__path: str) -> None: + """Generate detailed debugpy logs in the specified directory. + + The directory must already exist. Several log files are generated, + one for every process involved in the debug session. + """ + + +@_api() +def configure(__properties: dict[str, typing.Any] | None = None, **kwargs) -> None: + """Sets debug configuration properties that cannot be set in the + "attach" request, because they must be applied as early as possible + in the process being debugged. + + For example, a "launch" configuration with subprocess debugging + disabled can be defined entirely in JSON:: + + { + "request": "launch", + "subProcess": false, + ... + } + + But the same cannot be done with "attach", because "subProcess" + must be known at the point debugpy starts tracing execution. Thus, + it is not available in JSON, and must be omitted:: + + { + "request": "attach", + ... + } + + and set from within the debugged process instead:: + + debugpy.configure(subProcess=False) + debugpy.listen(...) + + Properties to set can be passed either as a single dict argument, + or as separate keyword arguments:: + + debugpy.configure({"subProcess": False}) + """ + + +@_api() +def listen(__endpoint: Endpoint | int) -> Endpoint: + """Starts a debug adapter debugging this process, that listens for + incoming socket connections from clients on the specified address. + + `__endpoint` must be either a (host, port) tuple as defined by the + standard `socket` module for the `AF_INET` address family, or a port + number. If only the port is specified, host is "127.0.0.1". + + Returns the interface and the port on which the debug adapter is + actually listening, in the same format as `__endpoint`. This may be + different from address if port was 0 in the latter, in which case + the adapter will pick some unused ephemeral port to listen on. + + This function does't wait for a client to connect to the debug + adapter that it starts. Use `wait_for_client` to block execution + until the client connects. + """ + + +@_api() +def connect(__endpoint: Endpoint | int, *, access_token: str | None = None) -> Endpoint: + """Tells an existing debug adapter instance that is listening on the + specified address to debug this process. + + `__endpoint` must be either a (host, port) tuple as defined by the + standard `socket` module for the `AF_INET` address family, or a port + number. If only the port is specified, host is "127.0.0.1". + + `access_token` must be the same value that was passed to the adapter + via the `--server-access-token` command-line switch. 
+ + This function does't wait for a client to connect to the debug + adapter that it connects to. Use `wait_for_client` to block + execution until the client connects. + """ + + +@_api(cancelable=True) +def wait_for_client() -> None: + """If there is a client connected to the debug adapter that is + debugging this process, returns immediately. Otherwise, blocks + until a client connects to the adapter. + + While this function is waiting, it can be canceled by calling + `wait_for_client.cancel()` from another thread. + """ + + +@_api() +def is_client_connected() -> bool: + """True if a client is connected to the debug adapter that is + debugging this process. + """ + + +@_api() +def breakpoint() -> None: + """If a client is connected to the debug adapter that is debugging + this process, pauses execution of all threads, and simulates a + breakpoint being hit at the line following the call. + + It is also registered as the default handler for builtins.breakpoint(). + """ + + +@_api() +def debug_this_thread() -> None: + """Makes the debugger aware of the current thread. + + Must be called on any background thread that is started by means + other than the usual Python APIs (i.e. the "threading" module), + in order for breakpoints to work on that thread. + """ + + +@_api() +def trace_this_thread(__should_trace: bool): + """Tells the debug adapter to enable or disable tracing on the + current thread. + + When the thread is traced, the debug adapter can detect breakpoints + being hit, but execution is slower, especially in functions that + have any breakpoints set in them. Disabling tracing when breakpoints + are not anticipated to be hit can improve performance. It can also + be used to skip breakpoints on a particular thread. + + Tracing is automatically disabled for all threads when there is no + client connected to the debug adapter. + """ + + +__version__: str = _version.get_versions()["version"] diff --git a/venv/lib/python3.8/site-packages/debugpy/server/__init__.py b/venv/lib/python3.8/site-packages/debugpy/server/__init__.py new file mode 100644 index 0000000..42d5367 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/server/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +# "force_pydevd" must be imported first to ensure (via side effects) +# that the debugpy-vendored copy of pydevd gets used. 
+import debugpy._vendored.force_pydevd # noqa diff --git a/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3a369db Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/api.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/api.cpython-38.pyc new file mode 100644 index 0000000..76b6f42 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/attach_pid_injected.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/attach_pid_injected.cpython-38.pyc new file mode 100644 index 0000000..c28e5b9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/attach_pid_injected.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/cli.cpython-38.pyc b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/cli.cpython-38.pyc new file mode 100644 index 0000000..3cc3fed Binary files /dev/null and b/venv/lib/python3.8/site-packages/debugpy/server/__pycache__/cli.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/debugpy/server/api.py b/venv/lib/python3.8/site-packages/debugpy/server/api.py new file mode 100644 index 0000000..515ea06 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/server/api.py @@ -0,0 +1,343 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +import codecs +import os +import pydevd +import socket +import sys +import threading + +import debugpy +from debugpy import adapter +from debugpy.common import json, log, sockets +from _pydevd_bundle.pydevd_constants import get_global_debugger +from pydevd_file_utils import absolute_path +from debugpy.common.util import hide_debugpy_internals + +_tls = threading.local() + +# TODO: "gevent", if possible. +_config = { + "qt": "none", + "subProcess": True, + "python": sys.executable, +} + +_config_valid_values = { + # If property is not listed here, any value is considered valid, so long as + # its type matches that of the default value in _config. + "qt": ["auto", "none", "pyside", "pyside2", "pyqt4", "pyqt5"], +} + +# This must be a global to prevent it from being garbage collected and triggering +# https://bugs.python.org/issue37380. +_adapter_process = None + + +def _settrace(*args, **kwargs): + log.debug("pydevd.settrace(*{0!r}, **{1!r})", args, kwargs) + # The stdin in notification is not acted upon in debugpy, so, disable it. 
+ kwargs.setdefault("notify_stdin", False) + try: + return pydevd.settrace(*args, **kwargs) + except Exception: + raise + else: + _settrace.called = True + + +_settrace.called = False + + +def ensure_logging(): + """Starts logging to log.log_dir, if it hasn't already been done.""" + if ensure_logging.ensured: + return + ensure_logging.ensured = True + log.to_file(prefix="debugpy.server") + log.describe_environment("Initial environment:") + + +ensure_logging.ensured = False + + +def log_to(path): + if ensure_logging.ensured: + raise RuntimeError("logging has already begun") + + log.debug("log_to{0!r}", (path,)) + if path is sys.stderr: + log.stderr.levels |= set(log.LEVELS) + else: + log.log_dir = path + + +def configure(properties=None, **kwargs): + if _settrace.called: + raise RuntimeError("debug adapter is already running") + + ensure_logging() + log.debug("configure{0!r}", (properties, kwargs)) + + if properties is None: + properties = kwargs + else: + properties = dict(properties) + properties.update(kwargs) + + for k, v in properties.items(): + if k not in _config: + raise ValueError("Unknown property {0!r}".format(k)) + expected_type = type(_config[k]) + if type(v) is not expected_type: + raise ValueError("{0!r} must be a {1}".format(k, expected_type.__name__)) + valid_values = _config_valid_values.get(k) + if (valid_values is not None) and (v not in valid_values): + raise ValueError("{0!r} must be one of: {1!r}".format(k, valid_values)) + _config[k] = v + + +def _starts_debugging(func): + def debug(address, **kwargs): + if _settrace.called: + raise RuntimeError("this process already has a debug adapter") + + try: + _, port = address + except Exception: + port = address + address = ("127.0.0.1", port) + try: + port.__index__() # ensure it's int-like + except Exception: + raise ValueError("expected port or (host, port)") + if not (0 <= port < 2 ** 16): + raise ValueError("invalid port number") + + ensure_logging() + log.debug("{0}({1!r}, **{2!r})", func.__name__, address, kwargs) + log.info("Initial debug configuration: {0}", json.repr(_config)) + + qt_mode = _config.get("qt", "none") + if qt_mode != "none": + pydevd.enable_qt_support(qt_mode) + + settrace_kwargs = { + "suspend": False, + "patch_multiprocessing": _config.get("subProcess", True), + } + + if hide_debugpy_internals(): + debugpy_path = os.path.dirname(absolute_path(debugpy.__file__)) + settrace_kwargs["dont_trace_start_patterns"] = (debugpy_path,) + settrace_kwargs["dont_trace_end_patterns"] = (str("debugpy_launcher.py"),) + + try: + return func(address, settrace_kwargs, **kwargs) + except Exception: + log.reraise_exception("{0}() failed:", func.__name__, level="info") + + return debug + + +@_starts_debugging +def listen(address, settrace_kwargs): + # Errors below are logged with level="info", because the caller might be catching + # and handling exceptions, and we don't want to spam their stderr unnecessarily. 
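# Illustrative usage sketch (not part of the vendored sources; port 5678 is an
# arbitrary example): the address normalization above means listen()/connect()
# accept either a bare port or a (host, port) tuple.
import debugpy

debugpy.listen(5678)          # same as debugpy.listen(("127.0.0.1", 5678))
debugpy.wait_for_client()     # block until a DAP client attaches
debugpy.breakpoint()          # pause here once the client is connected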
+ + import subprocess + + server_access_token = codecs.encode(os.urandom(32), "hex").decode("ascii") + + try: + endpoints_listener = sockets.create_server("127.0.0.1", 0, timeout=10) + except Exception as exc: + log.swallow_exception("Can't listen for adapter endpoints:") + raise RuntimeError("can't listen for adapter endpoints: " + str(exc)) + + try: + endpoints_host, endpoints_port = endpoints_listener.getsockname() + log.info( + "Waiting for adapter endpoints on {0}:{1}...", + endpoints_host, + endpoints_port, + ) + + host, port = address + adapter_args = [ + _config.get("python", sys.executable), + os.path.dirname(adapter.__file__), + "--for-server", + str(endpoints_port), + "--host", + host, + "--port", + str(port), + "--server-access-token", + server_access_token, + ] + if log.log_dir is not None: + adapter_args += ["--log-dir", log.log_dir] + log.info("debugpy.listen() spawning adapter: {0}", json.repr(adapter_args)) + + # On Windows, detach the adapter from our console, if any, so that it doesn't + # receive Ctrl+C from it, and doesn't keep it open once we exit. + creationflags = 0 + if sys.platform == "win32": + creationflags |= 0x08000000 # CREATE_NO_WINDOW + creationflags |= 0x00000200 # CREATE_NEW_PROCESS_GROUP + + # Adapter will outlive this process, so we shouldn't wait for it. However, we + # need to ensure that the Popen instance for it doesn't get garbage-collected + # by holding a reference to it in a non-local variable, to avoid triggering + # https://bugs.python.org/issue37380. + try: + global _adapter_process + _adapter_process = subprocess.Popen( + adapter_args, close_fds=True, creationflags=creationflags + ) + if os.name == "posix": + # It's going to fork again to daemonize, so we need to wait on it to + # clean it up properly. + _adapter_process.wait() + else: + # Suppress misleading warning about child process still being alive when + # this process exits (https://bugs.python.org/issue38890). 
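# Illustrative equivalent (not part of the vendored sources) of the hex creation
# flags above, using the named constants that the stdlib subprocess module
# defines on Windows.
import subprocess
import sys

creationflags = 0
if sys.platform == "win32":
    creationflags |= subprocess.CREATE_NO_WINDOW          # 0x08000000
    creationflags |= subprocess.CREATE_NEW_PROCESS_GROUP  # 0x00000200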
+ _adapter_process.returncode = 0 + pydevd.add_dont_terminate_child_pid(_adapter_process.pid) + except Exception as exc: + log.swallow_exception("Error spawning debug adapter:", level="info") + raise RuntimeError("error spawning debug adapter: " + str(exc)) + + try: + sock, _ = endpoints_listener.accept() + try: + sock.settimeout(None) + sock_io = sock.makefile("rb", 0) + try: + endpoints = json.loads(sock_io.read().decode("utf-8")) + finally: + sock_io.close() + finally: + sockets.close_socket(sock) + except socket.timeout: + log.swallow_exception( + "Timed out waiting for adapter to connect:", level="info" + ) + raise RuntimeError("timed out waiting for adapter to connect") + except Exception as exc: + log.swallow_exception("Error retrieving adapter endpoints:", level="info") + raise RuntimeError("error retrieving adapter endpoints: " + str(exc)) + + finally: + endpoints_listener.close() + + log.info("Endpoints received from adapter: {0}", json.repr(endpoints)) + + if "error" in endpoints: + raise RuntimeError(str(endpoints["error"])) + + try: + server_host = str(endpoints["server"]["host"]) + server_port = int(endpoints["server"]["port"]) + client_host = str(endpoints["client"]["host"]) + client_port = int(endpoints["client"]["port"]) + except Exception as exc: + log.swallow_exception( + "Error parsing adapter endpoints:\n{0}\n", + json.repr(endpoints), + level="info", + ) + raise RuntimeError("error parsing adapter endpoints: " + str(exc)) + log.info( + "Adapter is accepting incoming client connections on {0}:{1}", + client_host, + client_port, + ) + + _settrace( + host=server_host, + port=server_port, + wait_for_ready_to_run=False, + block_until_connected=True, + access_token=server_access_token, + **settrace_kwargs + ) + log.info("pydevd is connected to adapter at {0}:{1}", server_host, server_port) + return client_host, client_port + + +@_starts_debugging +def connect(address, settrace_kwargs, access_token=None): + host, port = address + _settrace(host=host, port=port, client_access_token=access_token, **settrace_kwargs) + + +class wait_for_client: + def __call__(self): + ensure_logging() + log.debug("wait_for_client()") + + pydb = get_global_debugger() + if pydb is None: + raise RuntimeError("listen() or connect() must be called first") + + cancel_event = threading.Event() + self.cancel = cancel_event.set + pydevd._wait_for_attach(cancel=cancel_event) + + @staticmethod + def cancel(): + raise RuntimeError("wait_for_client() must be called first") + + +wait_for_client = wait_for_client() + + +def is_client_connected(): + return pydevd._is_attached() + + +def breakpoint(): + ensure_logging() + if not is_client_connected(): + log.info("breakpoint() ignored - debugger not attached") + return + log.debug("breakpoint()") + + # Get the first frame in the stack that's not an internal frame. 
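# Illustrative sketch (not part of the vendored sources): walking the call stack
# with sys._getframe().f_back, which is how breakpoint() below skips
# debugger-internal frames before asking pydevd to stop.
import sys

def caller_filenames():
    frame = sys._getframe().f_back  # skip this helper's own frame
    names = []
    while frame is not None:
        names.append(frame.f_code.co_filename)
        frame = frame.f_back
    return names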
+ pydb = get_global_debugger() + stop_at_frame = sys._getframe().f_back + while ( + stop_at_frame is not None + and pydb.get_file_type(stop_at_frame) == pydb.PYDEV_FILE + ): + stop_at_frame = stop_at_frame.f_back + + _settrace( + suspend=True, + trace_only_current_thread=True, + patch_multiprocessing=False, + stop_at_frame=stop_at_frame, + ) + stop_at_frame = None + + +def debug_this_thread(): + ensure_logging() + log.debug("debug_this_thread()") + + _settrace(suspend=False) + + +def trace_this_thread(should_trace): + ensure_logging() + log.debug("trace_this_thread({0!r})", should_trace) + + pydb = get_global_debugger() + if should_trace: + pydb.enable_tracing() + else: + pydb.disable_tracing() diff --git a/venv/lib/python3.8/site-packages/debugpy/server/attach_pid_injected.py b/venv/lib/python3.8/site-packages/debugpy/server/attach_pid_injected.py new file mode 100644 index 0000000..a8df6e1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/server/attach_pid_injected.py @@ -0,0 +1,92 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +"""Script injected into the debuggee process during attach-to-PID.""" + +import os + + +__file__ = os.path.abspath(__file__) +_debugpy_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) + + +def attach(setup): + log = None + try: + import sys + + if "threading" not in sys.modules: + try: + + def on_warn(msg): + print(msg, file=sys.stderr) + + def on_exception(msg): + print(msg, file=sys.stderr) + + def on_critical(msg): + print(msg, file=sys.stderr) + + pydevd_attach_to_process_path = os.path.join( + _debugpy_dir, + "debugpy", + "_vendored", + "pydevd", + "pydevd_attach_to_process", + ) + assert os.path.exists(pydevd_attach_to_process_path) + sys.path.insert(0, pydevd_attach_to_process_path) + + # NOTE: that it's not a part of the pydevd PYTHONPATH + import attach_script + + attach_script.fix_main_thread_id( + on_warn=on_warn, on_exception=on_exception, on_critical=on_critical + ) + + # NOTE: At this point it should be safe to remove this. + sys.path.remove(pydevd_attach_to_process_path) + except: + import traceback + + traceback.print_exc() + raise + + sys.path.insert(0, _debugpy_dir) + try: + import debugpy + import debugpy.server + from debugpy.common import json, log + import pydevd + finally: + assert sys.path[0] == _debugpy_dir + del sys.path[0] + + py_db = pydevd.get_global_debugger() + if py_db is not None: + py_db.dispose_and_kill_all_pydevd_threads(wait=False) + + if setup["log_to"] is not None: + debugpy.log_to(setup["log_to"]) + log.info("Configuring injected debugpy: {0}", json.repr(setup)) + + if setup["mode"] == "listen": + debugpy.listen(setup["address"]) + elif setup["mode"] == "connect": + debugpy.connect( + setup["address"], access_token=setup["adapter_access_token"] + ) + else: + raise AssertionError(repr(setup)) + + except: + import traceback + + traceback.print_exc() + if log is None: + raise + else: + log.reraise_exception() + + log.info("debugpy injected successfully") diff --git a/venv/lib/python3.8/site-packages/debugpy/server/cli.py b/venv/lib/python3.8/site-packages/debugpy/server/cli.py new file mode 100644 index 0000000..ba14347 --- /dev/null +++ b/venv/lib/python3.8/site-packages/debugpy/server/cli.py @@ -0,0 +1,434 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. 
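# Illustrative invocations of the command-line interface implemented by this
# module (a sketch; the script name, module name, port, and PID are placeholders):
#
#   python -m debugpy --listen 5678 --wait-for-client myscript.py --my-arg
#   python -m debugpy --connect 127.0.0.1:5678 -m mypackage
#   python -m debugpy --listen 5678 --pid 1234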
+ +import json +import os +import re +import sys +from importlib.util import find_spec + +# debugpy.__main__ should have preloaded pydevd properly before importing this module. +# Otherwise, some stdlib modules above might have had imported threading before pydevd +# could perform the necessary detours in it. +assert "pydevd" in sys.modules +import pydevd + +# Note: use the one bundled from pydevd so that it's invisible for the user. +from _pydevd_bundle import pydevd_runpy as runpy + +import debugpy +from debugpy.common import log +from debugpy.server import api + + +TARGET = " | -m | -c | --pid " + +HELP = """debugpy {0} +See https://aka.ms/debugpy for documentation. + +Usage: debugpy --listen | --connect + [:] + [--wait-for-client] + [--configure- ]... + [--log-to ] [--log-to-stderr] + {1} + []... +""".format( + debugpy.__version__, TARGET +) + + +class Options(object): + mode = None + address = None + log_to = None + log_to_stderr = False + target = None + target_kind = None + wait_for_client = False + adapter_access_token = None + + +options = Options() +options.config = {"qt": "none", "subProcess": True} + + +def in_range(parser, start, stop): + def parse(s): + n = parser(s) + if start is not None and n < start: + raise ValueError("must be >= {0}".format(start)) + if stop is not None and n >= stop: + raise ValueError("must be < {0}".format(stop)) + return n + + return parse + + +pid = in_range(int, 0, None) + + +def print_help_and_exit(switch, it): + print(HELP, file=sys.stderr) + sys.exit(0) + + +def print_version_and_exit(switch, it): + print(debugpy.__version__) + sys.exit(0) + + +def set_arg(varname, parser=(lambda x: x)): + def do(arg, it): + value = parser(next(it)) + setattr(options, varname, value) + + return do + + +def set_const(varname, value): + def do(arg, it): + setattr(options, varname, value) + + return do + + +def set_address(mode): + def do(arg, it): + if options.address is not None: + raise ValueError("--listen and --connect are mutually exclusive") + + # It's either host:port, or just port. + value = next(it) + host, sep, port = value.partition(":") + if not sep: + host = "127.0.0.1" + port = value + try: + port = int(port) + except Exception: + port = -1 + if not (0 <= port < 2 ** 16): + raise ValueError("invalid port number") + + options.mode = mode + options.address = (host, port) + + return do + + +def set_config(arg, it): + prefix = "--configure-" + assert arg.startswith(prefix) + name = arg[len(prefix) :] + value = next(it) + + if name not in options.config: + raise ValueError("unknown property {0!r}".format(name)) + + expected_type = type(options.config[name]) + try: + if expected_type is bool: + value = {"true": True, "false": False}[value.lower()] + else: + value = expected_type(value) + except Exception: + raise ValueError("{0!r} must be a {1}".format(name, expected_type.__name__)) + + options.config[name] = value + + +def set_target(kind, parser=(lambda x: x), positional=False): + def do(arg, it): + options.target_kind = kind + target = parser(arg if positional else next(it)) + + if isinstance(target, bytes): + # target may be the code, so, try some additional encodings... 
+ try: + target = target.decode(sys.getfilesystemencoding()) + except UnicodeDecodeError: + try: + target = target.decode("utf-8") + except UnicodeDecodeError: + import locale + + target = target.decode(locale.getpreferredencoding(False)) + options.target = target + + return do + + +# fmt: off +switches = [ + # Switch Placeholder Action + # ====== =========== ====== + + # Switches that are documented for use by end users. + ("-(\\?|h|-help)", None, print_help_and_exit), + ("-(V|-version)", None, print_version_and_exit), + ("--log-to" , "", set_arg("log_to")), + ("--log-to-stderr", None, set_const("log_to_stderr", True)), + ("--listen", "
    ", set_address("listen")), + ("--connect", "
    ", set_address("connect")), + ("--wait-for-client", None, set_const("wait_for_client", True)), + ("--configure-.+", "", set_config), + + # Switches that are used internally by the client or debugpy itself. + ("--adapter-access-token", "", set_arg("adapter_access_token")), + + # Targets. The "" entry corresponds to positional command line arguments, + # i.e. the ones not preceded by any switch name. + ("", "", set_target("file", positional=True)), + ("-m", "", set_target("module")), + ("-c", "", set_target("code")), + ("--pid", "", set_target("pid", pid)), +] +# fmt: on + + +def consume_argv(): + while len(sys.argv) >= 2: + value = sys.argv[1] + del sys.argv[1] + yield value + + +def parse_argv(): + seen = set() + it = consume_argv() + + while True: + try: + arg = next(it) + except StopIteration: + raise ValueError("missing target: " + TARGET) + + switch = arg + if not switch.startswith("-"): + switch = "" + for pattern, placeholder, action in switches: + if re.match("^(" + pattern + ")$", switch): + break + else: + raise ValueError("unrecognized switch " + switch) + + if switch in seen: + raise ValueError("duplicate switch " + switch) + else: + seen.add(switch) + + try: + action(arg, it) + except StopIteration: + assert placeholder is not None + raise ValueError("{0}: missing {1}".format(switch, placeholder)) + except Exception as exc: + raise ValueError("invalid {0} {1}: {2}".format(switch, placeholder, exc)) + + if options.target is not None: + break + + if options.mode is None: + raise ValueError("either --listen or --connect is required") + if options.adapter_access_token is not None and options.mode != "connect": + raise ValueError("--adapter-access-token requires --connect") + if options.target_kind == "pid" and options.wait_for_client: + raise ValueError("--pid does not support --wait-for-client") + + assert options.target is not None + assert options.target_kind is not None + assert options.address is not None + + +def start_debugging(argv_0): + # We need to set up sys.argv[0] before invoking either listen() or connect(), + # because they use it to report the "process" event. Thus, we can't rely on + # run_path() and run_module() doing that, even though they will eventually. + sys.argv[0] = argv_0 + + log.debug("sys.argv after patching: {0!r}", sys.argv) + + debugpy.configure(options.config) + + if options.mode == "listen": + debugpy.listen(options.address) + elif options.mode == "connect": + debugpy.connect(options.address, access_token=options.adapter_access_token) + else: + raise AssertionError(repr(options.mode)) + + if options.wait_for_client: + debugpy.wait_for_client() + + +def run_file(): + target = options.target + start_debugging(target) + + # run_path has one difference with invoking Python from command-line: + # if the target is a file (rather than a directory), it does not add its + # parent directory to sys.path. Thus, importing other modules from the + # same directory is broken unless sys.path is patched here. + + if os.path.isfile(target): + dir = os.path.dirname(target) + sys.path.insert(0, dir) + else: + log.debug("Not a file: {0!r}", target) + + log.describe_environment("Pre-launch environment:") + + log.info("Running file {0!r}", target) + runpy.run_path(target, run_name="__main__") + + +def run_module(): + # Add current directory to path, like Python itself does for -m. This must + # be in place before trying to use find_spec below to resolve submodules. 
+ sys.path.insert(0, str("")) + + # We want to do the same thing that run_module() would do here, without + # actually invoking it. + argv_0 = sys.argv[0] + try: + spec = find_spec(options.target) + if spec is not None: + argv_0 = spec.origin + except Exception: + log.swallow_exception("Error determining module path for sys.argv") + + start_debugging(argv_0) + log.describe_environment("Pre-launch environment:") + log.info("Running module {0!r}", options.target) + + # Docs say that runpy.run_module is equivalent to -m, but it's not actually + # the case for packages - -m sets __name__ to "__main__", but run_module sets + # it to "pkg.__main__". This breaks everything that uses the standard pattern + # __name__ == "__main__" to detect being run as a CLI app. On the other hand, + # runpy._run_module_as_main is a private function that actually implements -m. + try: + run_module_as_main = runpy._run_module_as_main + except AttributeError: + log.warning("runpy._run_module_as_main is missing, falling back to run_module.") + runpy.run_module(options.target, alter_sys=True) + else: + run_module_as_main(options.target, alter_argv=True) + + +def run_code(): + # Add current directory to path, like Python itself does for -c. + sys.path.insert(0, str("")) + code = compile(options.target, str(""), str("exec")) + + start_debugging(str("-c")) + + log.describe_environment("Pre-launch environment:") + log.info("Running code:\n\n{0}", options.target) + + eval(code, {}) + + +def attach_to_pid(): + pid = options.target + log.info("Attaching to process with PID={0}", pid) + + encode = lambda s: list(bytearray(s.encode("utf-8"))) if s is not None else None + + script_dir = os.path.dirname(debugpy.server.__file__) + assert os.path.exists(script_dir) + script_dir = encode(script_dir) + + setup = { + "mode": options.mode, + "address": options.address, + "wait_for_client": options.wait_for_client, + "log_to": options.log_to, + "adapter_access_token": options.adapter_access_token, + } + setup = encode(json.dumps(setup)) + + python_code = """ +import codecs; +import json; +import sys; + +decode = lambda s: codecs.utf_8_decode(bytearray(s))[0] if s is not None else None; + +script_dir = decode({script_dir}); +setup = json.loads(decode({setup})); + +sys.path.insert(0, script_dir); +import attach_pid_injected; +del sys.path[0]; + +attach_pid_injected.attach(setup); +""" + python_code = ( + python_code.replace("\r", "") + .replace("\n", "") + .format(script_dir=script_dir, setup=setup) + ) + log.info("Code to be injected: \n{0}", python_code.replace(";", ";\n")) + + # pydevd restriction on characters in injected code. + assert not ( + {'"', "'", "\r", "\n"} & set(python_code) + ), "Injected code should not contain any single quotes, double quotes, or newlines." 
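# Illustrative round-trip (not part of the vendored sources) of the quote-free
# encoding used for the injected setup payload: strings travel as lists of byte
# values, so the generated code contains no quote or newline characters.
import codecs

encode = lambda s: list(bytearray(s.encode("utf-8")))
decode = lambda b: codecs.utf_8_decode(bytearray(b))[0]

payload = encode('{"mode": "connect"}')          # e.g. [123, 34, 109, ...]
assert decode(payload) == '{"mode": "connect"}'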
+ + pydevd_attach_to_process_path = os.path.join( + os.path.dirname(pydevd.__file__), "pydevd_attach_to_process" + ) + + assert os.path.exists(pydevd_attach_to_process_path) + sys.path.append(pydevd_attach_to_process_path) + + try: + import add_code_to_python_process # noqa + + log.info("Injecting code into process with PID={0} ...", pid) + add_code_to_python_process.run_python_code( + pid, + python_code, + connect_debugger_tracing=True, + show_debug_info=int(os.getenv("DEBUGPY_ATTACH_BY_PID_DEBUG_INFO", "0")), + ) + except Exception: + log.reraise_exception("Code injection into PID={0} failed:", pid) + log.info("Code injection into PID={0} completed.", pid) + + +def main(): + original_argv = list(sys.argv) + try: + parse_argv() + except Exception as exc: + print(str(HELP) + str("\nError: ") + str(exc), file=sys.stderr) + sys.exit(2) + + if options.log_to is not None: + debugpy.log_to(options.log_to) + if options.log_to_stderr: + debugpy.log_to(sys.stderr) + + api.ensure_logging() + + log.info( + str("sys.argv before parsing: {0!r}\n" " after parsing: {1!r}"), + original_argv, + sys.argv, + ) + + try: + run = { + "file": run_file, + "module": run_module, + "code": run_code, + "pid": attach_to_pid, + }[options.target_kind] + run() + except SystemExit as exc: + log.reraise_exception( + "Debuggee exited via SystemExit: {0!r}", exc.code, level="debug" + ) diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/LICENSE.txt b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/LICENSE.txt new file mode 120000 index 0000000..21f668d --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/LICENSE.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/80/8e/10/c8a6ab8deb149ff9b3fb19f447a808094606d712a9ca57fead3552599d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/METADATA b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/METADATA new file mode 100644 index 0000000..8d98f09 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/METADATA @@ -0,0 +1,32 @@ +Metadata-Version: 2.1 +Name: distlib +Version: 0.3.5 +Summary: Distribution utilities +Home-page: https://github.com/pypa/distlib +Author: Vinay Sajip +Author-email: vinay_sajip@red-dove.com +License: Python license +Project-URL: Documentation, https://distlib.readthedocs.io/ +Project-URL: Source, https://github.com/pypa/distlib +Project-URL: Tracker, https://github.com/pypa/distlib/issues +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: 
Python :: 3.11 +Classifier: Topic :: Software Development +License-File: LICENSE.txt + +Low-level components of distutils2/packaging, augmented with higher-level APIs for making packaging easier. + diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/RECORD b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/RECORD new file mode 100644 index 0000000..85fb5d9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/RECORD @@ -0,0 +1,38 @@ +distlib-0.3.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +distlib-0.3.5.dist-info/LICENSE.txt,sha256=gI4QyKarjesUn_mz-xn0R6gICUYG1xKpylf-rTVSWZ0,14531 +distlib-0.3.5.dist-info/METADATA,sha256=fQuTZdIqg5DSNbtlFKMnowGnKbCeqU_E46es6qL-y44,1322 +distlib-0.3.5.dist-info/RECORD,, +distlib-0.3.5.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +distlib-0.3.5.dist-info/top_level.txt,sha256=9BERqitu_vzyeyILOcGzX9YyA2AB_xlC4-81V6xoizk,8 +distlib/__init__.py,sha256=kshNHF2XFPxmBv57X7Jsj7c6VzF5r9naVwGePP-s5Wc,581 +distlib/__pycache__/__init__.cpython-38.pyc,, +distlib/__pycache__/compat.cpython-38.pyc,, +distlib/__pycache__/database.cpython-38.pyc,, +distlib/__pycache__/index.cpython-38.pyc,, +distlib/__pycache__/locators.cpython-38.pyc,, +distlib/__pycache__/manifest.cpython-38.pyc,, +distlib/__pycache__/markers.cpython-38.pyc,, +distlib/__pycache__/metadata.cpython-38.pyc,, +distlib/__pycache__/resources.cpython-38.pyc,, +distlib/__pycache__/scripts.cpython-38.pyc,, +distlib/__pycache__/util.cpython-38.pyc,, +distlib/__pycache__/version.cpython-38.pyc,, +distlib/__pycache__/wheel.cpython-38.pyc,, +distlib/compat.py,sha256=tfoMrj6tujk7G4UC2owL6ArgDuCKabgBxuJRGZSmpko,41259 +distlib/database.py,sha256=o_mw0fAr93NDAHHHfqG54Y1Hi9Rkfrp2BX15XWZYK50,51697 +distlib/index.py,sha256=HFiDG7LMoaBs829WuotrfIwcErOOExUOR_AeBtw_TCU,20834 +distlib/locators.py,sha256=wNzG-zERzS_XGls-nBPVVyLRHa2skUlkn0-5n0trMWA,51991 +distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +distlib/markers.py,sha256=TpHHHLgkzyT7YHbwj-2i6weRaq-Ivy2-MUnrDkjau-U,5058 +distlib/metadata.py,sha256=g_DIiu8nBXRzA-mWPRpatHGbmFZqaFoss7z9TG7QSUU,39801 +distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 +distlib/scripts.py,sha256=BmkTKmiTk4m2cj-iueliatwz3ut_9SsABBW51vnQnZU,18102 +distlib/t32.exe,sha256=lD3IWCwZiYZ9onypJifi2R1sGS-S24t3mYupsEAOKyA,97792 +distlib/t64-arm.exe,sha256=3hjLTLg7XBVHwDxgzZpkiHXXp4IJQMLvg_ZlGGpKZDI,182784 +distlib/t64.exe,sha256=vvS31h4-SnumthypJJKAWBeBh7cxSJ8AwGmbqumM05k,107520 +distlib/util.py,sha256=31dPXn3Rfat0xZLeVoFpuniyhe6vsbl9_QN-qd9Lhlk,66262 +distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513 +distlib/w32.exe,sha256=-a9nfgVZClK_mGnbQtXVLbn7xtPYw3xDKTddKnu2AJ8,91648 +distlib/w64-arm.exe,sha256=LW-JyIIGblDOp8Psy8igJXDvcg_YpJIINvN0tRjfv18,168448 +distlib/w64.exe,sha256=XvKqAoQzca86ERd2-KV5Vo1RSOBM28I2a35sPQONrYI,101888 +distlib/wheel.py,sha256=Rgqs658VsJ3R2845qwnZD8XQryV2CzWw2mghwLvxxsI,43898 diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/WHEEL b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/WHEEL new file mode 120000 index 0000000..ae483c4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/d8/f4/c406bf26650a3299b3ef62b464600b48cfe7fb04159866e5797c765478 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/top_level.txt 
b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/top_level.txt new file mode 100644 index 0000000..f68bb07 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib-0.3.5.dist-info/top_level.txt @@ -0,0 +1 @@ +distlib diff --git a/venv/lib/python3.8/site-packages/distlib/__init__.py b/venv/lib/python3.8/site-packages/distlib/__init__.py new file mode 100644 index 0000000..5055565 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2019 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import logging + +__version__ = '0.3.5' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d827be2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/compat.cpython-38.pyc new file mode 100644 index 0000000..46adb6e Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/database.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/database.cpython-38.pyc new file mode 100644 index 0000000..a3259b3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/database.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/index.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/index.cpython-38.pyc new file mode 100644 index 0000000..88a2c93 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/index.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/locators.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/locators.cpython-38.pyc new file mode 100644 index 0000000..997b749 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/locators.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/manifest.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/manifest.cpython-38.pyc new file mode 100644 index 0000000..992bd6d Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/manifest.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/markers.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/markers.cpython-38.pyc new file mode 100644 index 0000000..b0c942c Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/markers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/metadata.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/metadata.cpython-38.pyc new file mode 100644 index 0000000..6342dad Binary files 
/dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/metadata.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/resources.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/resources.cpython-38.pyc new file mode 100644 index 0000000..a8abe34 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/resources.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/scripts.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/scripts.cpython-38.pyc new file mode 100644 index 0000000..64fb583 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/scripts.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/util.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000..20e430b Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..3df1452 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/wheel.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/wheel.cpython-38.pyc new file mode 100644 index 0000000..d3690fa Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/wheel.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/distlib/compat.py b/venv/lib/python3.8/site-packages/distlib/compat.py new file mode 120000 index 0000000..614d2f0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b5/fa/0c/ae3eadba393b1b8502da8c0be80ae00ee08a69b801c6e2511994a6a64a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib/database.py b/venv/lib/python3.8/site-packages/distlib/database.py new file mode 100644 index 0000000..5db5d7f --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/database.py @@ -0,0 +1,1350 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . 
import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). + """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. 
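# Illustrative use (not part of the vendored sources) of the DistributionPath
# API defined above: enumerate installed distributions, including legacy
# .egg-info ones when include_egg=True.
from distlib.database import DistributionPath

dist_path = DistributionPath(include_egg=True)
for dist in dist_path.get_distributions():
    print(dist.key, dist.version)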
+ seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + try: + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + except Exception as e: + msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s' + logger.warning(msg, r.path, e) + import warnings + warnings.warn(msg % (r.path, e), stacklevel=2) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. + :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. 
+ + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if version is not None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + # We hit a problem on Travis where enum34 was installed and doesn't + # have a provides attribute ... + if not hasattr(dist, 'provides'): + logger.debug('No "provides": %s', dist) + else: + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. + """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. 
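# Illustrative usage sketch (not part of the distlib source above) for the
# lookup helpers defined here: get_distribution() does a case-insensitive
# single-project lookup, provides_distribution() yields every distribution
# whose "provides" metadata matches, and get_exported_entries() walks the
# recorded exports. 'pip' and 'console_scripts' are merely plausible examples.
from distlib.database import DistributionPath

dist_path = DistributionPath()

dist = dist_path.get_distribution('pip')
if dist is not None:
    print(dist.name_and_version)

for provider in dist_path.provides_distribution('pip', '>=20.0'):
    print('provided by', provider.name_and_version)

for entry in dist_path.get_exported_entries('console_scripts'):
    print(entry.name, entry.prefix, entry.suffix)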
+ """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + reqts = getattr(md, req_attr) + logger.debug('%s: got requirements %r from metadata: %r', self.name, req_attr, + reqts) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. + """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and + self.version == other.version and + self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. 
+ """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). 
+ """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.modules = [] + self.finder = finder = resources.finder_for_path(path) + if finder is None: + raise ValueError('finder unavailable for %s' % path) + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find(LEGACY_METADATA_FILENAME) + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + r = finder.find('REQUESTED') + self.requested = r is not None + p = os.path.join(path, 'top_level.txt') + if os.path.exists(p): + with open(p, 'rb') as f: + data = f.read().decode('utf-8') + self.modules = data.splitlines() + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). + """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + #base_location = os.path.dirname(self.path) + #base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + #if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. 
+ + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. 
via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. 
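# Illustrative usage sketch (not part of the distlib source above) for the
# RECORD and SHARED handling described here. check_installed_files() re-checks
# every RECORD entry and reports anything whose existence, size or hash no
# longer matches; shared_locations reads simple 'key=value' lines. 'pip' is
# only an example of a package likely to be installed.
from distlib.database import DistributionPath, InstalledDistribution

dist = DistributionPath().get_distribution('pip')
if isinstance(dist, InstalledDistribution):
    for path, problem, expected, actual in dist.check_installed_files():
        print('%s: %s (expected %r, got %r)' % (path, problem, expected, actual))

    # A SHARED file, when present, contains lines roughly like:
    #   prefix=/usr
    #   scripts=/usr/bin
    #   namespace=/usr/lib/python3.8/site-packages/somepkg
    print(dist.shared_locations)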
+ + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + tl_path = tl_data = None + if path.endswith('.egg'): + if os.path.isdir(path): + p = os.path.join(path, 'EGG-INFO') + meta_path = os.path.join(p, 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(p, 'requires.txt') + tl_path = os.path.join(p, 'top_level.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + tl_path = os.path.join(path, 'top_level.txt') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + # look for top-level modules in top_level.txt, if present + if tl_data is None: + if tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = f.read().decode('utf-8') + if not tl_data: + tl_data = [] + else: + tl_data = tl_data.splitlines() + self.modules = tl_data + return metadata + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. 
+ + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. 
+ + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
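# Illustrative sketch (not part of the distlib source above): wiring a tiny
# DependencyGraph by hand to make adjacency_list / reverse_list / missing easy
# to see. make_graph(), defined below, normally builds the graph for you; the
# two synthetic distributions and the requirement strings are examples only.
from distlib.database import DependencyGraph, make_dist

app = make_dist('app', '1.0')
lib = make_dist('lib', '2.3')

graph = DependencyGraph()
graph.add_distribution(app)
graph.add_distribution(lib)
graph.add_edge(app, lib, label='lib (>=2.0)')    # app depends on lib
graph.add_missing(app, 'missing-extra (>=1.0)')  # requirement nothing provides

print(graph.repr_node(app))   # 'app 1.0' with 'lib 2.3 [lib (>=2.0)]' indented below it
print(graph.missing[app])     # ['missing-extra (>=1.0)']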
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + in finding the dependencies. 
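# Illustrative usage sketch (not part of the distlib source above) tying the
# helpers together: make_graph() builds a DependencyGraph for the installed
# distributions, topological_sort() yields an ordering plus any cycles, and
# get_required_dists() walks one member's dependencies. 'requests' is only an
# example name and may not be installed.
from distlib.database import DistributionPath, make_graph, get_required_dists

dists = list(DistributionPath().get_distributions())
graph = make_graph(dists)

ordered, cyclic = graph.topological_sort()
print('%d sortable, %d caught in cycles' % (len(ordered), len(cyclic)))

target = next((d for d in dists if d.key == 'requests'), None)
if target is not None:
    for dep in get_required_dists(dists, target):
        print('requests needs', dep.name_and_version)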
+ """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = set() # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + seen = set(t[0] for t in todo) # already added to todo + + while todo: + d = todo.pop()[0] + req.add(d) + pred_list = graph.adjacency_list[d] + for pred in pred_list: + d = pred[0] + if d not in req and d not in seen: + seen.add(d) + todo.append(pred) + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. + """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/venv/lib/python3.8/site-packages/distlib/index.py b/venv/lib/python3.8/site-packages/distlib/index.py new file mode 100644 index 0000000..9b6d129 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/index.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: # pragma: no cover + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from .util import _get_pypirc_command as cmd + return cmd() + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils. This populates + ``username``, ``password``, ``realm`` and ``url`` attributes from the + configuration. + """ + from .util import _load_pypirc + cfg = _load_pypirc(self) + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. 
You must have set ``username`` and + ``password`` attributes before calling this method. + """ + self.check_credentials() + from .util import _store_pypirc + _store_pypirc(self) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. + """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): # pragma: no cover + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + response = self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, keystore=None): # pragma: no cover + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. 
+ """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): # pragma: no cover + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): # pragma: no cover + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): # pragma: no cover + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/venv/lib/python3.8/site-packages/distlib/locators.py b/venv/lib/python3.8/site-packages/distlib/locators.py new file mode 100644 index 0000000..966ebc0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/locators.py @@ -0,0 +1,1300 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, ensure_slash, split_filename, get_project_data, + parse_requirement, parse_name_and_version, ServerProxy, + normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. 
+ def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
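# Illustrative sketch (not part of the distlib source above) for the Locator
# base class: _get_project() must come from a subclass, but the caching and
# error-queue behaviour can be seen with a throwaway stub. NullLocator below
# is an invented example, not a distlib class.
from distlib.locators import Locator

class NullLocator(Locator):
    """A stub locator that never finds anything."""
    def _get_project(self, name):
        # shape expected by the rest of the machinery: versions plus bookkeeping keys
        return {'urls': {}, 'digests': {}}
    def get_distribution_names(self):
        return set()

loc = NullLocator(scheme='default')
print(loc.get_project('example-project'))   # cached after the first call
print(loc.get_errors())                     # [] -- nothing has gone wrong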
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.org' in t.netloc, + is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. 
+ + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. + """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + pass # logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + # else: + # logger.debug('skipping pre-release ' + # 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. 
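A short usage sketch for the locate() convenience (defined further down as default_locator.locate); it queries PyPI over the network, so availability and results vary:

    from distlib.locators import locate

    dist = locate('requests (>= 2.0)')      # needs network access
    if dist is not None:
        print(dist.name, dist.version)
        print(sorted(dist.download_urls))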
+ """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? +href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) +(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." 
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + self.platform_check = False # See issue #112 + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.daemon = True + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? 
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self.platform_check and self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
+ """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 440. +default_locator = AggregatingLocator( + # JSONLocator(), # don't use as PEP 426 is withdrawn + SimpleScrapingLocator('https://pypi.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. 
+ if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/venv/lib/python3.8/site-packages/distlib/manifest.py b/venv/lib/python3.8/site-packages/distlib/manifest.py new file mode 120000 index 0000000..40c5a1c --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/manifest.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9d/01/21/626828ade681673c85cf062c5f124046eddfa38124ba7535eb7535ea21 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib/markers.py b/venv/lib/python3.8/site-packages/distlib/markers.py new file mode 120000 index 0000000..68848bf --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/markers.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4e/91/c7/1cb824cf24fb6076f08feda2eb07916aaf88bf2dbe3149eb0e48dabbe5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib/metadata.py b/venv/lib/python3.8/site-packages/distlib/metadata.py new file mode 100644 index 0000000..c329e19 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/metadata.py @@ -0,0 +1,1076 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. 
Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +# Ditto for Obsoletes - see issue #140. 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides', 'Obsoletes') + +_566_MARKERS = ('Description-Content-Type',) + +_643_MARKERS = ('Dynamic', 'License-File') + +_643_FIELDS = _566_FIELDS + _643_MARKERS + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) +_ALL_FIELDS.update(_643_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + # avoid adding field names if already there + return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) + elif version == '2.0': + raise ValueError('Metadata 2.0 is withdrawn and not supported') + # return _426_FIELDS + elif version == '2.2': + return _643_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2'] # 2.0 removed + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _643_FIELDS and '2.2' in possible_versions: + possible_versions.remove('2.2') + logger.debug('Removed 2.2 due to %s', key) + # if key not in _426_FIELDS and '2.0' in possible_versions: + # possible_versions.remove('2.0') + # logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields') + + # we have the choice, 1.0, or 1.2, 2.1 or 2.2 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.1 adds more features + # - 2.2 is the latest + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + # if is_2_2: + # return '2.2' + + return '2.2' + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = { + name.lower().replace("-", "_"): name for name in _ALL_FIELDS +} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension', 'License-File') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
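A small offline sketch of LegacyMetadata round-tripping (the constructor feeds a mapping through the update() method shown here; 'example-project' is a made-up name):

    from io import StringIO
    from distlib.metadata import LegacyMetadata

    md = LegacyMetadata(mapping={'name': 'example-project', 'version': '1.0',
                                 'summary': 'An example distribution'})
    print(md['Metadata-Version'])        # inferred by _best_version, '1.1' here
    buf = StringIO()
    md.write_file(buf, skip_unknown=True)
    print(buf.getvalue())                # minimal PKG-INFO style output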
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.1 + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
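For comparison, a minimal sketch of the newer Metadata class initialised from a mapping (the values are made up; 'metadata_version' must be '2.0' and 'summary' is mandatory under the default scheme):

    from distlib.metadata import Metadata

    md = Metadata(mapping={'metadata_version': '2.0',
                           'name': 'example-project',
                           'version': '1.0',
                           'summary': 'An example distribution'})
    print(md.name, md.version)
    print(md.provides)                   # ['example-project (1.0)']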
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + 'dynamic': (FIELDNAME_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/venv/lib/python3.8/site-packages/distlib/resources.py b/venv/lib/python3.8/site-packages/distlib/resources.py new file mode 120000 index 0000000..1974508 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/resources.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/2f/06/cf/92c73403524c6e2e979ee3dd301527f375fb04fb85356a8f184288ebdf \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/distlib/scripts.py b/venv/lib/python3.8/site-packages/distlib/scripts.py new file mode 100644 index 0000000..d270624 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/scripts.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys +import time +from zipfile import ZipInfo + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, get_platform, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. 
+ import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. + + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and + (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' + executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if not os.path.isfile(executable): + # for Python builds from source on Windows, no Python executables with + # a version suffix are created, so we use python.exe + executable = os.path.join(sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. 
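# A rough sketch of what the shebang machinery above produces (the interpreter path is
# hypothetical): a short, space-free POSIX path takes the simple branch of _build_shebang(),
#     self._build_shebang(b'/usr/bin/python3', b'')  ->  b'#!/usr/bin/python3\n'
# while a path that contains a space or exceeds the length cap used above (127, or 512 on
# macOS) gets the /bin/sh trampoline form instead, which both sh and Python can execute.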
+ executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') + if source_date_epoch: + date_time = time.gmtime(int(source_date_epoch))[:6] + zinfo = ZipInfo(filename='__main__.py', date_time=date_time) + zf.writestr(zinfo, script_bytes) + else: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + variant_separator = '-' + + def get_script_filenames(self, name): + result = set() + if '' in self.variants: + result.add(name) + if 'X' in self.variants: + result.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + result.add('%s%s%s.%s' % (name, self.variant_separator, + self.version_info[0], self.version_info[1])) + return result + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + scriptnames = self.get_script_filenames(entry.name) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s is an empty file (skipping)', script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. 
+ # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' + name = '%s%s%s.exe' % (kind, bits, platform_suffix) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. + + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/venv/lib/python3.8/site-packages/distlib/t32.exe b/venv/lib/python3.8/site-packages/distlib/t32.exe new file mode 100644 index 0000000..0aaa386 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/t32.exe differ diff --git a/venv/lib/python3.8/site-packages/distlib/t64-arm.exe b/venv/lib/python3.8/site-packages/distlib/t64-arm.exe new file mode 100644 index 0000000..a759e27 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/t64-arm.exe differ diff --git a/venv/lib/python3.8/site-packages/distlib/t64.exe b/venv/lib/python3.8/site-packages/distlib/t64.exe new file mode 100644 index 0000000..82fe2d9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/t64.exe differ diff --git a/venv/lib/python3.8/site-packages/distlib/util.py b/venv/lib/python3.8/site-packages/distlib/util.py new file mode 120000 index 0000000..ed78ed6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/util.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/df/57/4f/5e7dd17dab74c592de568169ba78b285eeafb1b97dfd037ea9df4b8659 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib/version.py b/venv/lib/python3.8/site-packages/distlib/version.py new file mode 120000 index 0000000..2448d9b --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/version.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/58/6f/ff/2f201ad86c2603aa92a0426dbc913c4440352d9a5b4a2cf2f16be124b9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/distlib/w32.exe b/venv/lib/python3.8/site-packages/distlib/w32.exe new file mode 100644 index 0000000..f2e73aa Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/w32.exe differ diff --git a/venv/lib/python3.8/site-packages/distlib/w64-arm.exe b/venv/lib/python3.8/site-packages/distlib/w64-arm.exe new file mode 100644 index 
0000000..b998321 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/w64-arm.exe differ diff --git a/venv/lib/python3.8/site-packages/distlib/w64.exe b/venv/lib/python3.8/site-packages/distlib/w64.exe new file mode 100644 index 0000000..9a6b894 Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/w64.exe differ diff --git a/venv/lib/python3.8/site-packages/distlib/wheel.py b/venv/lib/python3.8/site-packages/distlib/wheel.py new file mode 100644 index 0000000..028c2d9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/distlib/wheel.py @@ -0,0 +1,1082 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2020 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +from email import message_from_file +import hashlib +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . import __version__, DistlibException +from .compat import sysconfig, ZipFile, fsdecode, text_type, filter +from .database import InstalledDistribution +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, + cached_property, get_cache_base, read_exports, tempdir, + get_platform) +from .version import NormalizedVersion, UnsupportedVersionError + +logger = logging.getLogger(__name__) + +cache = None # created when needed + +if hasattr(sys, 'pypy_version_info'): # pragma: no cover + IMP_PREFIX = 'pp' +elif sys.platform.startswith('java'): # pragma: no cover + IMP_PREFIX = 'jy' +elif sys.platform == 'cli': # pragma: no cover + IMP_PREFIX = 'ip' +else: + IMP_PREFIX = 'cp' + +VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') +if not VER_SUFFIX: # pragma: no cover + VER_SUFFIX = '%s%s' % sys.version_info[:2] +PYVER = 'py' + VER_SUFFIX +IMPVER = IMP_PREFIX + VER_SUFFIX + +ARCH = get_platform().replace('-', '_').replace('.', '_') + +ABI = sysconfig.get_config_var('SOABI') +if ABI and ABI.startswith('cpython-'): + ABI = ABI.replace('cpython-', 'cp').split('-')[0] +else: + def _derive_abi(): + parts = ['cp', VER_SUFFIX] + if sysconfig.get_config_var('Py_DEBUG'): + parts.append('d') + if IMP_PREFIX == 'cp': + vi = sys.version_info[:2] + if vi < (3, 8): + wpm = sysconfig.get_config_var('WITH_PYMALLOC') + if wpm is None: + wpm = True + if wpm: + parts.append('m') + if vi < (3, 3): + us = sysconfig.get_config_var('Py_UNICODE_SIZE') + if us == 4 or (us is None and sys.maxunicode == 0x10FFFF): + parts.append('u') + return ''.join(parts) + ABI = _derive_abi() + del _derive_abi + +FILENAME_RE = re.compile(r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))? 
+-(?P\w+\d+(\.\w+\d+)*) +-(?P\w+) +-(?P\w+(\.\w+)*) +\.whl$ +''', re.IGNORECASE | re.VERBOSE) + +NAME_VERSION_RE = re.compile(r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))?$ +''', re.IGNORECASE | re.VERBOSE) + +SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') +SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') +SHEBANG_PYTHON = b'#!python' +SHEBANG_PYTHONW = b'#!pythonw' + +if os.sep == '/': + to_posix = lambda o: o +else: + to_posix = lambda o: o.replace(os.sep, '/') + +if sys.version_info[0] < 3: + import imp +else: + imp = None + import importlib.machinery + import importlib.util + +def _get_suffixes(): + if imp: + return [s[0] for s in imp.get_suffixes()] + else: + return importlib.machinery.EXTENSION_SUFFIXES + +def _load_dynamic(name, path): + # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly + if imp: + return imp.load_dynamic(name, path) + else: + spec = importlib.util.spec_from_file_location(name, path) + module = importlib.util.module_from_spec(spec) + sys.modules[name] = module + spec.loader.exec_module(module) + return module + +class Mounter(object): + def __init__(self): + self.impure_wheels = {} + self.libs = {} + + def add(self, pathname, extensions): + self.impure_wheels[pathname] = extensions + self.libs.update(extensions) + + def remove(self, pathname): + extensions = self.impure_wheels.pop(pathname) + for k, v in extensions: + if k in self.libs: + del self.libs[k] + + def find_module(self, fullname, path=None): + if fullname in self.libs: + result = self + else: + result = None + return result + + def load_module(self, fullname): + if fullname in sys.modules: + result = sys.modules[fullname] + else: + if fullname not in self.libs: + raise ImportError('unable to find extension for %s' % fullname) + result = _load_dynamic(fullname, self.libs[fullname]) + result.__loader__ = self + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] + return result + +_hook = Mounter() + + +class Wheel(object): + """ + Class to build and install from Wheel files (PEP 427). + """ + + wheel_version = (1, 1) + hash_kind = 'sha256' + + def __init__(self, filename=None, sign=False, verify=False): + """ + Initialise an instance using a (valid) filename. + """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. 
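A doctest-style sketch (the wheel name is hypothetical; the components were
parsed from the filename in __init__ and are reassembled here):

>>> w = Wheel('demo-0.1-py3-none-any.whl')
>>> (w.name, w.version, w.pyver, w.abi, w.arch)
('demo', '0.1', ['py3'], ['none'], ['any'])
>>> w.filename
'demo-0.1-py3-none-any.whl'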
+ """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) + if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, archive_record_path): + records = list(records) # make a copy, as mutated + records.append((archive_record_path, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher 
= getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + ap = to_posix(os.path.join(info_dir, 'RECORD')) + self.write_record(records, p, ap) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. + path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. 
Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. + """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. If kwarg ``bytecode_hashed_invalidation`` is True, written + bytecode will try to use file-hash based invalidation (PEP-552) on + supported interpreter versions (CPython 2.7+). + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. + """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! 
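# For orientation, a rough sketch of the 'paths' mapping this method consumes and how a
# caller typically drives it (all names below are hypothetical):
#     paths = {'prefix': '/tmp/demo', 'purelib': '/tmp/demo/lib', 'platlib': '/tmp/demo/lib',
#              'scripts': '/tmp/demo/bin', 'headers': '/tmp/demo/include',
#              'data': '/tmp/demo/data'}
#     dist = Wheel('demo-0.1-py3-none-any.whl').install(paths, ScriptMaker(None, None))
# 'purelib'/'platlib' decide where library files go, 'scripts' is where generated scripts
# land, and 'prefix' is used when the installed-files RECORD is written at the end.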
+ + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. + if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + # Issue #147: permission bits aren't preserved. Using + # zf.extract(zinfo, libdir) should have worked, but didn't, + # see https://www.thetopsites.net/article/53834422.shtml + # So ... manually preserve permission bits as given in zinfo + if os.name == 'posix': + # just set the normal permission bits + os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. 
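# For illustration (a hypothetical project): a legacy entry_points.txt section such as
#     [console_scripts]
#     demo = demo.cli:main
# is read below into commands == {'wrap_console': {'demo': 'demo.cli:main'}}, and each item
# is then passed to maker.make('demo = demo.cli:main') to generate the actual script files.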
+ commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. 
+ """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' % pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' in p: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. 
If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. + """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def _get_glibc_version(): + import platform + ver = platform.libc_ver() + result = [] + if ver[0] == 'glibc': + for s in ver[1].split('.'): + result.append(int(s) if s.isdigit() else 0) + result = tuple(result) + return result + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
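A small sketch of how the cached result is typically consumed (the wheel name is
hypothetical; the module stores the computed set as COMPATIBLE_TAGS, and the exact
contents depend on the running interpreter and platform):

>>> ('py3', 'none', 'any') in COMPATIBLE_TAGS    # pure-Python wheels, any CPython 3.x
True
>>> is_compatible('demo-0.1-py3-none-any.whl')   # module-level helper accepts a filename
True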
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix in _get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + # manylinux + if abi != 'none' and sys.platform.startswith('linux'): + arch = arch.replace('linux_', '') + parts = _get_glibc_version() + if len(parts) == 2: + if parts >= (2, 5): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux1_%s' % arch)) + if parts >= (2, 12): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2010_%s' % arch)) + if parts >= (2, 17): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2014_%s' % arch)) + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux_%s_%s_%s' % (parts[0], parts[1], + arch))) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/venv/lib/python3.8/site-packages/distutils-precedence.pth b/venv/lib/python3.8/site-packages/distutils-precedence.pth new file mode 120000 index 0000000..023640d --- /dev/null +++ b/venv/lib/python3.8/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7e/a7/ff/ef3fe2a117ee12c68ed6553617f0d7fd2f0590257c25c484959a3b7373 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/LICENSE new file mode 120000 
index 0000000..6c06c3a --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/88/d9/b4/eb60579c191ec391ca04c16130572d7eedc4a86daa58bf28c6e14c9bcd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/METADATA new file mode 120000 index 0000000..5c2305c --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/9f/eb/b4/24f0dc862dde378b8509142fb0d90a0bf0b5d10c3b3eacbd590c700213 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/RECORD new file mode 100644 index 0000000..ca8f078 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/RECORD @@ -0,0 +1,24 @@ +filelock-3.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +filelock-3.7.1.dist-info/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210 +filelock-3.7.1.dist-info/METADATA,sha256=n-u0JPDchi3eN4uFCRQvsNkKC_C10Qw7Pqy9WQxwAhM,2537 +filelock-3.7.1.dist-info/RECORD,, +filelock-3.7.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +filelock-3.7.1.dist-info/top_level.txt,sha256=NDrf9i5BNogz4hEdsr6Hi7Ws3TlSSKY4Q2Y9_-i2GwU,9 +filelock-3.7.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +filelock/__init__.py,sha256=XE-RanOqTbJkFCKJbe3PqzNCkVgLQXqs_Yp1rrUPF58,1246 +filelock/__pycache__/__init__.cpython-38.pyc,, +filelock/__pycache__/_api.cpython-38.pyc,, +filelock/__pycache__/_error.cpython-38.pyc,, +filelock/__pycache__/_soft.cpython-38.pyc,, +filelock/__pycache__/_unix.cpython-38.pyc,, +filelock/__pycache__/_util.cpython-38.pyc,, +filelock/__pycache__/_windows.cpython-38.pyc,, +filelock/__pycache__/version.cpython-38.pyc,, +filelock/_api.py,sha256=bwQbBjCWXfBhR5e6I9lRgSZPiuBWF8bkMqrr2NdmYIo,8860 +filelock/_error.py,sha256=Gaxp2TfdmgdvYFkllGCBOE37vYIXnHKKW3RYfKH7DYM,399 +filelock/_soft.py,sha256=rSpmt4Oi0Eb4JeKzyWImeqf5MYCJR0Dyc_kUN3kHj7Y,1650 +filelock/_unix.py,sha256=gM4-5mqDtamGp5qwWkaZNSTuv9p7vwBa4diVmI1ZBwQ,1578 +filelock/_util.py,sha256=BZKOAYTQdmcHmF34-Ft4kMdEvk3a8NGtsAhe6kfxZuU,594 +filelock/_windows.py,sha256=wvg-_SfJEDyxDeDVH8wBqxbPquXaycoYXgXJOBmm-G4,1890 +filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +filelock/version.py,sha256=9aLd-KoN-y8Y2gRKw7z6WVYrMEaHPtFGPxC-kvG_yik,142 diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/top_level.txt new file mode 120000 index 0000000..699b46c --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/34/3a/df/f62e41368833e2111db2be878bb5acdd395248a63843663dffe8b61b05 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/zip-safe 
b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/zip-safe new file mode 120000 index 0000000..e11f061 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock-3.7.1.dist-info/zip-safe @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/01/ba/47/19c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/__init__.py b/venv/lib/python3.8/site-packages/filelock/__init__.py new file mode 120000 index 0000000..de3b65c --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5c/4f/91/6a73aa4db2641422896dedcfab334291580b417aacfd8a75aeb50f179f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c86a843 Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_api.cpython-38.pyc new file mode 100644 index 0000000..206ebca Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_error.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_error.cpython-38.pyc new file mode 100644 index 0000000..1b2f13e Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_error.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_soft.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_soft.cpython-38.pyc new file mode 100644 index 0000000..26a3c8c Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_soft.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_unix.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_unix.cpython-38.pyc new file mode 100644 index 0000000..3183647 Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_unix.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_util.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_util.cpython-38.pyc new file mode 100644 index 0000000..b4345fa Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_util.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_windows.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_windows.cpython-38.pyc new file mode 100644 index 0000000..8228892 Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_windows.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..1f62315 Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/filelock/_api.py b/venv/lib/python3.8/site-packages/filelock/_api.py new file mode 120000 index 0000000..c38f51d --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/_api.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/6f/04/1b/0630965df0614797ba23d95181264f8ae05617c6e432aaebd8d766608a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/_error.py b/venv/lib/python3.8/site-packages/filelock/_error.py new file mode 120000 index 0000000..e2b46cb --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/_error.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/19/ac/69/d937dd9a076f605925946081384dfbbd82179c728a5b74587ca1fb0d83 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/_soft.py b/venv/lib/python3.8/site-packages/filelock/_soft.py new file mode 120000 index 0000000..8ecbabe --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/_soft.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ad/2a/66/b783a2d046f825e2b3c962267aa7f93180894740f273f9143779078fb6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/_unix.py b/venv/lib/python3.8/site-packages/filelock/_unix.py new file mode 120000 index 0000000..98c86b1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/_unix.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/80/ce/3e/e66a83b5a986a79ab05a46993524eebfda7bbf005ae1d895988d590704 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/_util.py b/venv/lib/python3.8/site-packages/filelock/_util.py new file mode 120000 index 0000000..215a9be --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/_util.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/05/92/8e/0184d0766707985df8f85b7890c744be4ddaf0d1adb0085eea47f166e5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/_windows.py b/venv/lib/python3.8/site-packages/filelock/_windows.py new file mode 120000 index 0000000..a6d8663 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/_windows.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c2/f8/3e/fd27c9103cb10de0d51fcc01ab16cfaae5dac9ca185e05c93819a6f86e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/py.typed b/venv/lib/python3.8/site-packages/filelock/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/filelock/version.py b/venv/lib/python3.8/site-packages/filelock/version.py new file mode 120000 index 0000000..f62e797 --- /dev/null +++ b/venv/lib/python3.8/site-packages/filelock/version.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f5/a2/dd/f8aa0dfb2f18da044ac3bcfa59562b3046873ed1463f10be92f1bfca29 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/flask/__init__.py b/venv/lib/python3.8/site-packages/flask/__init__.py new file mode 100644 index 0000000..e02531c --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/__init__.py @@ -0,0 +1,71 @@ +from markupsafe import escape +from markupsafe import Markup + +from . 
import json as json +from .app import Flask as Flask +from .app import Request as Request +from .app import Response as Response +from .blueprints import Blueprint as Blueprint +from .config import Config as Config +from .ctx import after_this_request as after_this_request +from .ctx import copy_current_request_context as copy_current_request_context +from .ctx import has_app_context as has_app_context +from .ctx import has_request_context as has_request_context +from .globals import current_app as current_app +from .globals import g as g +from .globals import request as request +from .globals import session as session +from .helpers import abort as abort +from .helpers import flash as flash +from .helpers import get_flashed_messages as get_flashed_messages +from .helpers import get_template_attribute as get_template_attribute +from .helpers import make_response as make_response +from .helpers import redirect as redirect +from .helpers import send_file as send_file +from .helpers import send_from_directory as send_from_directory +from .helpers import stream_with_context as stream_with_context +from .helpers import url_for as url_for +from .json import jsonify as jsonify +from .signals import appcontext_popped as appcontext_popped +from .signals import appcontext_pushed as appcontext_pushed +from .signals import appcontext_tearing_down as appcontext_tearing_down +from .signals import before_render_template as before_render_template +from .signals import got_request_exception as got_request_exception +from .signals import message_flashed as message_flashed +from .signals import request_finished as request_finished +from .signals import request_started as request_started +from .signals import request_tearing_down as request_tearing_down +from .signals import signals_available as signals_available +from .signals import template_rendered as template_rendered +from .templating import render_template as render_template +from .templating import render_template_string as render_template_string +from .templating import stream_template as stream_template +from .templating import stream_template_string as stream_template_string + +__version__ = "2.2.2" + + +def __getattr__(name): + if name == "_app_ctx_stack": + import warnings + from .globals import __app_ctx_stack + + warnings.warn( + "'_app_ctx_stack' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + return __app_ctx_stack + + if name == "_request_ctx_stack": + import warnings + from .globals import __request_ctx_stack + + warnings.warn( + "'_request_ctx_stack' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + return __request_ctx_stack + + raise AttributeError(name) diff --git a/venv/lib/python3.8/site-packages/flask/__main__.py b/venv/lib/python3.8/site-packages/flask/__main__.py new file mode 120000 index 0000000..880171c --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/__main__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/6d/8b/7d/7846a845059d7a3107143f11131f63c5511d669b44085b15ec5e3d2279 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..9554358 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/__main__.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/flask/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..ec4578d Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/app.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/app.cpython-38.pyc new file mode 100644 index 0000000..d38d0a1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/app.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/blueprints.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/blueprints.cpython-38.pyc new file mode 100644 index 0000000..5502e01 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/blueprints.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/cli.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/cli.cpython-38.pyc new file mode 100644 index 0000000..1674609 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/cli.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/config.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/config.cpython-38.pyc new file mode 100644 index 0000000..22ac207 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/config.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/ctx.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/ctx.cpython-38.pyc new file mode 100644 index 0000000..5c9cba4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/ctx.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/debughelpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/debughelpers.cpython-38.pyc new file mode 100644 index 0000000..ca1715a Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/debughelpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/globals.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/globals.cpython-38.pyc new file mode 100644 index 0000000..27aca80 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/globals.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/helpers.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/helpers.cpython-38.pyc new file mode 100644 index 0000000..ed80e7d Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/helpers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/logging.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/logging.cpython-38.pyc new file mode 100644 index 0000000..de9c17f Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/logging.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/scaffold.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/scaffold.cpython-38.pyc new file mode 100644 index 0000000..a9e89b1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/scaffold.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/sessions.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/sessions.cpython-38.pyc 
new file mode 100644 index 0000000..8683eb4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/sessions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/signals.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/signals.cpython-38.pyc new file mode 100644 index 0000000..b95db01 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/signals.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/templating.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/templating.cpython-38.pyc new file mode 100644 index 0000000..5386b3e Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/templating.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/testing.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/testing.cpython-38.pyc new file mode 100644 index 0000000..417e531 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/testing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/typing.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/typing.cpython-38.pyc new file mode 100644 index 0000000..1d4d626 Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/typing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/views.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/views.cpython-38.pyc new file mode 100644 index 0000000..691548c Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/views.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/__pycache__/wrappers.cpython-38.pyc b/venv/lib/python3.8/site-packages/flask/__pycache__/wrappers.cpython-38.pyc new file mode 100644 index 0000000..820c29b Binary files /dev/null and b/venv/lib/python3.8/site-packages/flask/__pycache__/wrappers.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/flask/app.py b/venv/lib/python3.8/site-packages/flask/app.py new file mode 100644 index 0000000..db442c9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/app.py @@ -0,0 +1,2548 @@ +import functools +import inspect +import json +import logging +import os +import sys +import typing as t +import weakref +from collections.abc import Iterator as _abc_Iterator +from datetime import timedelta +from itertools import chain +from threading import Lock +from types import TracebackType + +import click +from werkzeug.datastructures import Headers +from werkzeug.datastructures import ImmutableDict +from werkzeug.exceptions import Aborter +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import BadRequestKeyError +from werkzeug.exceptions import HTTPException +from werkzeug.exceptions import InternalServerError +from werkzeug.routing import BuildError +from werkzeug.routing import Map +from werkzeug.routing import MapAdapter +from werkzeug.routing import RequestRedirect +from werkzeug.routing import RoutingException +from werkzeug.routing import Rule +from werkzeug.serving import is_running_from_reloader +from werkzeug.urls import url_quote +from werkzeug.utils import redirect as _wz_redirect +from werkzeug.wrappers import Response as BaseResponse + +from . import cli +from . 
import typing as ft +from .config import Config +from .config import ConfigAttribute +from .ctx import _AppCtxGlobals +from .ctx import AppContext +from .ctx import RequestContext +from .globals import _cv_app +from .globals import _cv_request +from .globals import g +from .globals import request +from .globals import request_ctx +from .globals import session +from .helpers import _split_blueprint_path +from .helpers import get_debug_flag +from .helpers import get_flashed_messages +from .helpers import get_load_dotenv +from .helpers import locked_cached_property +from .json.provider import DefaultJSONProvider +from .json.provider import JSONProvider +from .logging import create_logger +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import find_package +from .scaffold import Scaffold +from .scaffold import setupmethod +from .sessions import SecureCookieSessionInterface +from .sessions import SessionInterface +from .signals import appcontext_tearing_down +from .signals import got_request_exception +from .signals import request_finished +from .signals import request_started +from .signals import request_tearing_down +from .templating import DispatchingJinjaLoader +from .templating import Environment +from .wrappers import Request +from .wrappers import Response + +if t.TYPE_CHECKING: # pragma: no cover + import typing_extensions as te + from .blueprints import Blueprint + from .testing import FlaskClient + from .testing import FlaskCliRunner + +T_before_first_request = t.TypeVar( + "T_before_first_request", bound=ft.BeforeFirstRequestCallable +) +T_shell_context_processor = t.TypeVar( + "T_shell_context_processor", bound=ft.ShellContextProcessorCallable +) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) +T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) +T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) + +if sys.version_info >= (3, 8): + iscoroutinefunction = inspect.iscoroutinefunction +else: + + def iscoroutinefunction(func: t.Any) -> bool: + while inspect.ismethod(func): + func = func.__func__ + + while isinstance(func, functools.partial): + func = func.func + + return inspect.iscoroutinefunction(func) + + +def _make_timedelta(value: t.Union[timedelta, int, None]) -> t.Optional[timedelta]: + if value is None or isinstance(value, timedelta): + return value + + return timedelta(seconds=value) + + +class Flask(Scaffold): + """The flask object implements a WSGI application and acts as the central + object. It is passed the name of the module or package of the + application. Once it is created it will act as a central registry for + the view functions, the URL rules, template configuration and much more. + + The name of the package is used to resolve resources from inside the + package or the folder the module is contained in depending on if the + package parameter resolves to an actual python package (a folder with + an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). + + For more information about resource loading, see :func:`open_resource`. + + Usually you create a :class:`Flask` instance in your main module or + in the :file:`__init__.py` file of your package like this:: + + from flask import Flask + app = Flask(__name__) + + .. 
admonition:: About the First Parameter + + The idea of the first parameter is to give Flask an idea of what + belongs to your application. This name is used to find resources + on the filesystem, can be used by extensions to improve debugging + information and a lot more. + + So it's important what you provide there. If you are using a single + module, `__name__` is always the correct value. If you however are + using a package, it's usually recommended to hardcode the name of + your package there. + + For example if your application is defined in :file:`yourapplication/app.py` + you should create it with one of the two versions below:: + + app = Flask('yourapplication') + app = Flask(__name__.split('.')[0]) + + Why is that? The application will work even with `__name__`, thanks + to how resources are looked up. However it will make debugging more + painful. Certain extensions can make assumptions based on the + import name of your application. For example the Flask-SQLAlchemy + extension will look for the code in your application that triggered + an SQL query in debug mode. If the import name is not properly set + up, that debugging information is lost. (For example it would only + pick up SQL queries in `yourapplication.app` and not + `yourapplication.views.frontend`) + + .. versionadded:: 0.7 + The `static_url_path`, `static_folder`, and `template_folder` + parameters were added. + + .. versionadded:: 0.8 + The `instance_path` and `instance_relative_config` parameters were + added. + + .. versionadded:: 0.11 + The `root_path` parameter was added. + + .. versionadded:: 1.0 + The ``host_matching`` and ``static_host`` parameters were added. + + .. versionadded:: 1.0 + The ``subdomain_matching`` parameter was added. Subdomain + matching needs to be enabled manually now. Setting + :data:`SERVER_NAME` does not implicitly enable it. + + :param import_name: the name of the application package + :param static_url_path: can be used to specify a different path for the + static files on the web. Defaults to the name + of the `static_folder` folder. + :param static_folder: The folder with static files that is served at + ``static_url_path``. Relative to the application ``root_path`` + or an absolute path. Defaults to ``'static'``. + :param static_host: the host to use when adding the static route. + Defaults to None. Required when using ``host_matching=True`` + with a ``static_folder`` configured. + :param host_matching: set ``url_map.host_matching`` attribute. + Defaults to False. + :param subdomain_matching: consider the subdomain relative to + :data:`SERVER_NAME` when matching routes. Defaults to False. + :param template_folder: the folder that contains the templates that should + be used by the application. Defaults to + ``'templates'`` folder in the root path of the + application. + :param instance_path: An alternative instance path for the application. + By default the folder ``'instance'`` next to the + package or module is assumed to be the instance + path. + :param instance_relative_config: if set to ``True`` relative filenames + for loading the config are assumed to + be relative to the instance path instead + of the application root. + :param root_path: The path to the root of the application files. + This should only be set manually when it can't be detected + automatically, such as for namespace packages. + """ + + #: The class that is used for request objects. See :class:`~flask.Request` + #: for more information. 
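The constructor parameters documented above map directly onto the usual application-factory style of setup. A minimal sketch, assuming the conventional static/ and templates/ folders; the route and its return value are purely illustrative::

    from flask import Flask

    app = Flask(
        __name__,
        static_folder="static",          # served under static_url_path
        template_folder="templates",
        instance_relative_config=True,   # config files resolved against the instance path
    )

    @app.route("/")
    def index():
        return "hello"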
+ request_class = Request + + #: The class that is used for response objects. See + #: :class:`~flask.Response` for more information. + response_class = Response + + #: The class of the object assigned to :attr:`aborter`, created by + #: :meth:`create_aborter`. That object is called by + #: :func:`flask.abort` to raise HTTP errors, and can be + #: called directly as well. + #: + #: Defaults to :class:`werkzeug.exceptions.Aborter`. + #: + #: .. versionadded:: 2.2 + aborter_class = Aborter + + #: The class that is used for the Jinja environment. + #: + #: .. versionadded:: 0.11 + jinja_environment = Environment + + #: The class that is used for the :data:`~flask.g` instance. + #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on unexpected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: In Flask 0.9 this property was called `request_globals_class` but it + #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the + #: flask.g object is now application context scoped. + #: + #: .. versionadded:: 0.10 + app_ctx_globals_class = _AppCtxGlobals + + #: The class that is used for the ``config`` attribute of this app. + #: Defaults to :class:`~flask.Config`. + #: + #: Example use cases for a custom class: + #: + #: 1. Default values for certain config options. + #: 2. Access to config values through attributes in addition to keys. + #: + #: .. versionadded:: 0.11 + config_class = Config + + #: The testing flag. Set this to ``True`` to enable the test mode of + #: Flask extensions (and in the future probably also Flask itself). + #: For example this might activate test helpers that have an + #: additional runtime cost which should not be enabled by default. + #: + #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the + #: default it's implicitly enabled. + #: + #: This attribute can also be configured from the config with the + #: ``TESTING`` configuration key. Defaults to ``False``. + testing = ConfigAttribute("TESTING") + + #: If a secret key is set, cryptographic components can use this to + #: sign cookies and other things. Set this to a complex random value + #: when you want to use the secure cookie for instance. + #: + #: This attribute can also be configured from the config with the + #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. + secret_key = ConfigAttribute("SECRET_KEY") + + @property + def session_cookie_name(self) -> str: + """The name of the cookie set by the session interface. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.config["SESSION_COOKIE_NAME"]`` + instead. + """ + import warnings + + warnings.warn( + "'session_cookie_name' is deprecated and will be removed in Flask 2.3. Use" + " 'SESSION_COOKIE_NAME' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.config["SESSION_COOKIE_NAME"] + + @session_cookie_name.setter + def session_cookie_name(self, value: str) -> None: + import warnings + + warnings.warn( + "'session_cookie_name' is deprecated and will be removed in Flask 2.3. Use" + " 'SESSION_COOKIE_NAME' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["SESSION_COOKIE_NAME"] = value + + #: A :class:`~datetime.timedelta` which is used to set the expiration + #: date of a permanent session. 
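Most of the class-level attributes in this stretch (testing, secret_key, the session cookie name, and the permanent session lifetime) are thin wrappers over app.config keys, so in practice they are set through the config mapping. A minimal sketch; the SECRET_KEY value is a placeholder, not a real secret::

    from datetime import timedelta
    from flask import Flask

    app = Flask(__name__)
    app.config.update(
        TESTING=False,
        SECRET_KEY="change-me",                         # placeholder only
        SESSION_COOKIE_NAME="session",
        PERMANENT_SESSION_LIFETIME=timedelta(days=31),  # matches the documented default
    )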
The default is 31 days which makes a + #: permanent session survive for roughly one month. + #: + #: This attribute can also be configured from the config with the + #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to + #: ``timedelta(days=31)`` + permanent_session_lifetime = ConfigAttribute( + "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta + ) + + @property + def send_file_max_age_default(self) -> t.Optional[timedelta]: + """The default value for ``max_age`` for :func:`~flask.send_file`. The default + is ``None``, which tells the browser to use conditional requests instead of a + timed cache. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use + ``app.config["SEND_FILE_MAX_AGE_DEFAULT"]`` instead. + + .. versionchanged:: 2.0 + Defaults to ``None`` instead of 12 hours. + """ + import warnings + + warnings.warn( + "'send_file_max_age_default' is deprecated and will be removed in Flask" + " 2.3. Use 'SEND_FILE_MAX_AGE_DEFAULT' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _make_timedelta(self.config["SEND_FILE_MAX_AGE_DEFAULT"]) + + @send_file_max_age_default.setter + def send_file_max_age_default(self, value: t.Union[int, timedelta, None]) -> None: + import warnings + + warnings.warn( + "'send_file_max_age_default' is deprecated and will be removed in Flask" + " 2.3. Use 'SEND_FILE_MAX_AGE_DEFAULT' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["SEND_FILE_MAX_AGE_DEFAULT"] = _make_timedelta(value) + + @property + def use_x_sendfile(self) -> bool: + """Enable this to use the ``X-Sendfile`` feature, assuming the server supports + it, from :func:`~flask.send_file`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.config["USE_X_SENDFILE"]`` instead. + """ + import warnings + + warnings.warn( + "'use_x_sendfile' is deprecated and will be removed in Flask 2.3. Use" + " 'USE_X_SENDFILE' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.config["USE_X_SENDFILE"] + + @use_x_sendfile.setter + def use_x_sendfile(self, value: bool) -> None: + import warnings + + warnings.warn( + "'use_x_sendfile' is deprecated and will be removed in Flask 2.3. Use" + " 'USE_X_SENDFILE' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["USE_X_SENDFILE"] = value + + _json_encoder: t.Union[t.Type[json.JSONEncoder], None] = None + _json_decoder: t.Union[t.Type[json.JSONDecoder], None] = None + + @property # type: ignore[override] + def json_encoder(self) -> t.Type[json.JSONEncoder]: # type: ignore[override] + """The JSON encoder class to use. Defaults to + :class:`~flask.json.JSONEncoder`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'app.json_encoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if self._json_encoder is None: + from . import json + + return json.JSONEncoder + + return self._json_encoder + + @json_encoder.setter + def json_encoder(self, value: t.Type[json.JSONEncoder]) -> None: + import warnings + + warnings.warn( + "'app.json_encoder' is deprecated and will be removed in Flask 2.3." 
+ " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_encoder = value + + @property # type: ignore[override] + def json_decoder(self) -> t.Type[json.JSONDecoder]: # type: ignore[override] + """The JSON decoder class to use. Defaults to + :class:`~flask.json.JSONDecoder`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'app.json_decoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if self._json_decoder is None: + from . import json + + return json.JSONDecoder + + return self._json_decoder + + @json_decoder.setter + def json_decoder(self, value: t.Type[json.JSONDecoder]) -> None: + import warnings + + warnings.warn( + "'app.json_decoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_decoder = value + + json_provider_class: t.Type[JSONProvider] = DefaultJSONProvider + """A subclass of :class:`~flask.json.provider.JSONProvider`. An + instance is created and assigned to :attr:`app.json` when creating + the app. + + The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses + Python's built-in :mod:`json` library. A different provider can use + a different JSON library. + + .. versionadded:: 2.2 + """ + + #: Options that are passed to the Jinja environment in + #: :meth:`create_jinja_environment`. Changing these options after + #: the environment is created (accessing :attr:`jinja_env`) will + #: have no effect. + #: + #: .. versionchanged:: 1.1.0 + #: This is a ``dict`` instead of an ``ImmutableDict`` to allow + #: easier configuration. + #: + jinja_options: dict = {} + + #: Default configuration parameters. + default_config = ImmutableDict( + { + "ENV": None, + "DEBUG": None, + "TESTING": False, + "PROPAGATE_EXCEPTIONS": None, + "SECRET_KEY": None, + "PERMANENT_SESSION_LIFETIME": timedelta(days=31), + "USE_X_SENDFILE": False, + "SERVER_NAME": None, + "APPLICATION_ROOT": "/", + "SESSION_COOKIE_NAME": "session", + "SESSION_COOKIE_DOMAIN": None, + "SESSION_COOKIE_PATH": None, + "SESSION_COOKIE_HTTPONLY": True, + "SESSION_COOKIE_SECURE": False, + "SESSION_COOKIE_SAMESITE": None, + "SESSION_REFRESH_EACH_REQUEST": True, + "MAX_CONTENT_LENGTH": None, + "SEND_FILE_MAX_AGE_DEFAULT": None, + "TRAP_BAD_REQUEST_ERRORS": None, + "TRAP_HTTP_EXCEPTIONS": False, + "EXPLAIN_TEMPLATE_LOADING": False, + "PREFERRED_URL_SCHEME": "http", + "JSON_AS_ASCII": None, + "JSON_SORT_KEYS": None, + "JSONIFY_PRETTYPRINT_REGULAR": None, + "JSONIFY_MIMETYPE": None, + "TEMPLATES_AUTO_RELOAD": None, + "MAX_COOKIE_SIZE": 4093, + } + ) + + #: The rule object to use for URL rules created. This is used by + #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. + #: + #: .. versionadded:: 0.7 + url_rule_class = Rule + + #: The map object to use for storing the URL rules and routing + #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. + #: + #: .. versionadded:: 1.1.0 + url_map_class = Map + + #: The :meth:`test_client` method creates an instance of this test + #: client class. Defaults to :class:`~flask.testing.FlaskClient`. + #: + #: .. 
versionadded:: 0.7 + test_client_class: t.Optional[t.Type["FlaskClient"]] = None + + #: The :class:`~click.testing.CliRunner` subclass, by default + #: :class:`~flask.testing.FlaskCliRunner` that is used by + #: :meth:`test_cli_runner`. Its ``__init__`` method should take a + #: Flask app object as the first argument. + #: + #: .. versionadded:: 1.0 + test_cli_runner_class: t.Optional[t.Type["FlaskCliRunner"]] = None + + #: the session interface to use. By default an instance of + #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. + #: + #: .. versionadded:: 0.8 + session_interface: SessionInterface = SecureCookieSessionInterface() + + def __init__( + self, + import_name: str, + static_url_path: t.Optional[str] = None, + static_folder: t.Optional[t.Union[str, os.PathLike]] = "static", + static_host: t.Optional[str] = None, + host_matching: bool = False, + subdomain_matching: bool = False, + template_folder: t.Optional[str] = "templates", + instance_path: t.Optional[str] = None, + instance_relative_config: bool = False, + root_path: t.Optional[str] = None, + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if instance_path is None: + instance_path = self.auto_find_instance_path() + elif not os.path.isabs(instance_path): + raise ValueError( + "If an instance path is provided it must be absolute." + " A relative path was given instead." + ) + + #: Holds the path to the instance folder. + #: + #: .. versionadded:: 0.8 + self.instance_path = instance_path + + #: The configuration dictionary as :class:`Config`. This behaves + #: exactly like a regular dictionary but supports additional methods + #: to load a config from files. + self.config = self.make_config(instance_relative_config) + + #: An instance of :attr:`aborter_class` created by + #: :meth:`make_aborter`. This is called by :func:`flask.abort` + #: to raise HTTP errors, and can be called directly as well. + #: + #: .. versionadded:: 2.2 + #: Moved from ``flask.abort``, which calls this object. + self.aborter = self.make_aborter() + + self.json: JSONProvider = self.json_provider_class(self) + """Provides access to JSON methods. Functions in ``flask.json`` + will call methods on this provider when the application context + is active. Used for handling JSON requests and responses. + + An instance of :attr:`json_provider_class`. Can be customized by + changing that attribute on a subclass, or by assigning to this + attribute afterwards. + + The default, :class:`~flask.json.provider.DefaultJSONProvider`, + uses Python's built-in :mod:`json` library. A different provider + can use a different JSON library. + + .. versionadded:: 2.2 + """ + + #: A list of functions that are called by + #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function is called + #: with ``error``, ``endpoint`` and ``values``. If a function + #: returns ``None`` or raises a ``BuildError``, it is skipped. + #: Otherwise, its return value is returned by ``url_for``. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers: t.List[ + t.Callable[[Exception, str, t.Dict[str, t.Any]], str] + ] = [] + + #: A list of functions that will be called at the beginning of the + #: first request to this instance. To register a function, use the + #: :meth:`before_first_request` decorator. + #: + #: .. deprecated:: 2.2 + #: Will be removed in Flask 2.3. 
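The deprecation note here (continued just below) points at running one-time setup while the application is being created rather than in a before_first_request hook. A minimal sketch of that pattern; init_db() is a hypothetical setup helper used purely for illustration::

    from flask import Flask

    def create_app():
        app = Flask(__name__)
        with app.app_context():
            init_db()   # hypothetical: replace with real one-time setup
        return app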
Run setup code when + #: creating the application instead. + #: + #: .. versionadded:: 0.8 + self.before_first_request_funcs: t.List[ft.BeforeFirstRequestCallable] = [] + + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs: t.List[ft.TeardownCallable] = [] + + #: A list of shell context processor functions that should be run + #: when a shell context is created. + #: + #: .. versionadded:: 0.11 + self.shell_context_processors: t.List[ft.ShellContextProcessorCallable] = [] + + #: Maps registered blueprint names to blueprint objects. The + #: dict retains the order the blueprints were registered in. + #: Blueprints can be registered multiple times, this dict does + #: not track how often they were attached. + #: + #: .. versionadded:: 0.7 + self.blueprints: t.Dict[str, "Blueprint"] = {} + + #: a place where extensions can store application specific state. For + #: example this is where an extension could store database engines and + #: similar things. + #: + #: The key must match the name of the extension module. For example in + #: case of a "Flask-Foo" extension in `flask_foo`, the key would be + #: ``'foo'``. + #: + #: .. versionadded:: 0.7 + self.extensions: dict = {} + + #: The :class:`~werkzeug.routing.Map` for this instance. You can use + #: this to change the routing converters after the class was created + #: but before any routes are connected. Example:: + #: + #: from werkzeug.routing import BaseConverter + #: + #: class ListConverter(BaseConverter): + #: def to_python(self, value): + #: return value.split(',') + #: def to_url(self, values): + #: return ','.join(super(ListConverter, self).to_url(value) + #: for value in values) + #: + #: app = Flask(__name__) + #: app.url_map.converters['list'] = ListConverter + self.url_map = self.url_map_class() + + self.url_map.host_matching = host_matching + self.subdomain_matching = subdomain_matching + + # tracks internally if the application already handled at least one + # request. + self._got_first_request = False + self._before_request_lock = Lock() + + # Add a static route using the provided static_url_path, static_host, + # and static_folder if there is a configured static_folder. + # Note we do this without checking if static_folder exists. + # For one, it might be created while the server is running (e.g. during + # development). Also, Google App Engine stores static files somewhere + if self.has_static_folder: + assert ( + bool(static_host) == host_matching + ), "Invalid static_host/host_matching combination" + # Use a weakref to avoid creating a reference cycle between the app + # and the view function (see #3761). + self_ref = weakref.ref(self) + self.add_url_rule( + f"{self.static_url_path}/", + endpoint="static", + host=static_host, + view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 + ) + + # Set the name of the Click group in case someone wants to add + # the app's commands to another CLI tool. + self.cli.name = self.name + + def _check_setup_finished(self, f_name: str) -> None: + if self._got_first_request: + raise AssertionError( + f"The setup method '{f_name}' can no longer be called" + " on the application. 
It has already handled its first" + " request, any changes will not be applied" + " consistently.\n" + "Make sure all imports, decorators, functions, etc." + " needed to set up the application are done before" + " running it." + ) + + @locked_cached_property + def name(self) -> str: # type: ignore + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == "__main__": + fn = getattr(sys.modules["__main__"], "__file__", None) + if fn is None: + return "__main__" + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @property + def propagate_exceptions(self) -> bool: + """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration + value in case it's set, otherwise a sensible default is returned. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. + + .. versionadded:: 0.7 + """ + import warnings + + warnings.warn( + "'propagate_exceptions' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + rv = self.config["PROPAGATE_EXCEPTIONS"] + if rv is not None: + return rv + return self.testing or self.debug + + @locked_cached_property + def logger(self) -> logging.Logger: + """A standard Python :class:`~logging.Logger` for the app, with + the same name as :attr:`name`. + + In debug mode, the logger's :attr:`~logging.Logger.level` will + be set to :data:`~logging.DEBUG`. + + If there are no handlers configured, a default handler will be + added. See :doc:`/logging` for more information. + + .. versionchanged:: 1.1.0 + The logger takes the same name as :attr:`name` rather than + hard-coding ``"flask.app"``. + + .. versionchanged:: 1.0.0 + Behavior was simplified. The logger is always named + ``"flask.app"``. The level is only set during configuration, + it doesn't check ``app.debug`` each time. Only one format is + used, not different ones depending on ``app.debug``. No + handlers are removed, and a handler is only added if no + handlers are already configured. + + .. versionadded:: 0.3 + """ + return create_logger(self) + + @locked_cached_property + def jinja_env(self) -> Environment: + """The Jinja environment used to load templates. + + The environment is created the first time this property is + accessed. Changing :attr:`jinja_options` after that will have no + effect. + """ + return self.create_jinja_environment() + + @property + def got_first_request(self) -> bool: + """This attribute is set to ``True`` if the application started + handling the first request. + + .. versionadded:: 0.8 + """ + return self._got_first_request + + def make_config(self, instance_relative: bool = False) -> Config: + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. 
versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + defaults = dict(self.default_config) + defaults["ENV"] = os.environ.get("FLASK_ENV") or "production" + defaults["DEBUG"] = get_debug_flag() + return self.config_class(root_path, defaults) + + def make_aborter(self) -> Aborter: + """Create the object to assign to :attr:`aborter`. That object + is called by :func:`flask.abort` to raise HTTP errors, and can + be called directly as well. + + By default, this creates an instance of :attr:`aborter_class`, + which defaults to :class:`werkzeug.exceptions.Aborter`. + + .. versionadded:: 2.2 + """ + return self.aborter_class() + + def auto_find_instance_path(self) -> str: + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, "instance") + return os.path.join(prefix, "var", f"{self.name}-instance") + + def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + @property + def templates_auto_reload(self) -> bool: + """Reload templates when they are changed. Used by + :meth:`create_jinja_environment`. It is enabled by default in debug mode. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.config["TEMPLATES_AUTO_RELOAD"]`` + instead. + + .. versionadded:: 1.0 + This property was added but the underlying config and behavior + already existed. + """ + import warnings + + warnings.warn( + "'templates_auto_reload' is deprecated and will be removed in Flask 2.3." + " Use 'TEMPLATES_AUTO_RELOAD' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + rv = self.config["TEMPLATES_AUTO_RELOAD"] + return rv if rv is not None else self.debug + + @templates_auto_reload.setter + def templates_auto_reload(self, value: bool) -> None: + import warnings + + warnings.warn( + "'templates_auto_reload' is deprecated and will be removed in Flask 2.3." + " Use 'TEMPLATES_AUTO_RELOAD' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["TEMPLATES_AUTO_RELOAD"] = value + + def create_jinja_environment(self) -> Environment: + """Create the Jinja environment based on :attr:`jinja_options` + and the various Jinja-related methods of the app. Changing + :attr:`jinja_options` after this will have no effect. Also adds + Flask-related globals and filters to the environment. + + .. versionchanged:: 0.11 + ``Environment.auto_reload`` set in accordance with + ``TEMPLATES_AUTO_RELOAD`` configuration option. + + .. 
versionadded:: 0.5 + """ + options = dict(self.jinja_options) + + if "autoescape" not in options: + options["autoescape"] = self.select_jinja_autoescape + + if "auto_reload" not in options: + auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] + + if auto_reload is None: + auto_reload = self.debug + + options["auto_reload"] = auto_reload + + rv = self.jinja_environment(self, **options) + rv.globals.update( + url_for=self.url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. + request=request, + session=session, + g=g, + ) + rv.policies["json.dumps_function"] = self.json.dumps + return rv + + def create_global_jinja_loader(self) -> DispatchingJinjaLoader: + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader and keeping the rest unchanged. It's + discouraged to override this function. Instead one should override + the :meth:`jinja_loader` function instead. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def select_jinja_autoescape(self, filename: str) -> bool: + """Returns ``True`` if autoescaping should be active for the given + template name. If no template name is given, returns `True`. + + .. versionadded:: 0.5 + """ + if filename is None: + return True + return filename.endswith((".html", ".htm", ".xml", ".xhtml")) + + def update_template_context(self, context: dict) -> None: + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. Note that the as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + names: t.Iterable[t.Optional[str]] = (None,) + + # A template may be rendered outside a request context. + if request: + names = chain(names, reversed(request.blueprints)) + + # The values passed to render_template take precedence. Keep a + # copy to re-apply after all context functions. + orig_ctx = context.copy() + + for name in names: + if name in self.template_context_processors: + for func in self.template_context_processors[name]: + context.update(func()) + + context.update(orig_ctx) + + def make_shell_context(self) -> dict: + """Returns the shell context for an interactive shell for this + application. This runs all the registered shell context + processors. + + .. versionadded:: 0.11 + """ + rv = {"app": self, "g": g} + for processor in self.shell_context_processors: + rv.update(processor()) + return rv + + @property + def env(self) -> str: + """What environment the app is running in. This maps to the :data:`ENV` config + key. + + **Do not enable development when deploying in production.** + + Default: ``'production'`` + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. + """ + import warnings + + warnings.warn( + "'app.env' is deprecated and will be removed in Flask 2.3." 
+ " Use 'app.debug' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.config["ENV"] + + @env.setter + def env(self, value: str) -> None: + import warnings + + warnings.warn( + "'app.env' is deprecated and will be removed in Flask 2.3." + " Use 'app.debug' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["ENV"] = value + + @property + def debug(self) -> bool: + """Whether debug mode is enabled. When using ``flask run`` to start the + development server, an interactive debugger will be shown for unhandled + exceptions, and the server will be reloaded when code changes. This maps to the + :data:`DEBUG` config key. It may not behave as expected if set late. + + **Do not enable debug mode when deploying in production.** + + Default: ``False`` + """ + return self.config["DEBUG"] + + @debug.setter + def debug(self, value: bool) -> None: + self.config["DEBUG"] = value + + if self.config["TEMPLATES_AUTO_RELOAD"] is None: + self.jinja_env.auto_reload = value + + def run( + self, + host: t.Optional[str] = None, + port: t.Optional[int] = None, + debug: t.Optional[bool] = None, + load_dotenv: bool = True, + **options: t.Any, + ) -> None: + """Runs the application on a local development server. + + Do not use ``run()`` in a production setting. It is not intended to + meet security and performance requirements for a production server. + Instead, see :doc:`/deploying/index` for WSGI server recommendations. + + If the :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + It is not recommended to use this function for development with + automatic reloading as this is badly supported. Instead you should + be using the :command:`flask` command line script's ``run`` support. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to ``True`` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable + if present. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if present. + :param debug: if given, enable or disable debug mode. See + :attr:`debug`. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param options: the options to be forwarded to the underlying Werkzeug + server. See :func:`werkzeug.serving.run_simple` for more + information. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment + variables from :file:`.env` and :file:`.flaskenv` files. + + The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. + + Threaded mode is enabled by default. + + .. 
versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` + variable. + """ + # Ignore this call so that it doesn't start another server if + # the 'flask run' command is used. + if os.environ.get("FLASK_RUN_FROM_CLI") == "true": + if not is_running_from_reloader(): + click.secho( + " * Ignoring a call to 'app.run()' that would block" + " the current 'flask' CLI command.\n" + " Only call 'app.run()' in an 'if __name__ ==" + ' "__main__"\' guard.', + fg="red", + ) + + return + + if get_load_dotenv(load_dotenv): + cli.load_dotenv() + + # if set, let env vars override previous values + if "FLASK_ENV" in os.environ: + print( + "'FLASK_ENV' is deprecated and will not be used in" + " Flask 2.3. Use 'FLASK_DEBUG' instead.", + file=sys.stderr, + ) + self.config["ENV"] = os.environ.get("FLASK_ENV") or "production" + self.debug = get_debug_flag() + elif "FLASK_DEBUG" in os.environ: + self.debug = get_debug_flag() + + # debug passed to method overrides all other sources + if debug is not None: + self.debug = bool(debug) + + server_name = self.config.get("SERVER_NAME") + sn_host = sn_port = None + + if server_name: + sn_host, _, sn_port = server_name.partition(":") + + if not host: + if sn_host: + host = sn_host + else: + host = "127.0.0.1" + + if port or port == 0: + port = int(port) + elif sn_port: + port = int(sn_port) + else: + port = 5000 + + options.setdefault("use_reloader", self.debug) + options.setdefault("use_debugger", self.debug) + options.setdefault("threaded", True) + + cli.show_server_banner(self.debug, self.name) + + from werkzeug.serving import run_simple + + try: + run_simple(t.cast(str, host), port, self, **options) + finally: + # reset the first request information if the development server + # reset normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> "FlaskClient": + """Creates a test client for this application. For information + about unit testing head over to :doc:`/testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a ``with`` block to defer the closing down + of the context until the end of the ``with`` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + Additionally, you may pass optional keyword arguments that will then + be passed to the application's :attr:`test_client_class` constructor. + For example:: + + from flask.testing import FlaskClient + + class CustomClient(FlaskClient): + def __init__(self, *args, **kwargs): + self._authentication = kwargs.pop("authentication") + super(CustomClient,self).__init__( *args, **kwargs) + + app.test_client_class = CustomClient + client = app.test_client(authentication='Basic ....') + + See :class:`~flask.testing.FlaskClient` for more information. + + .. 
versionchanged:: 0.4 + added support for ``with`` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. + + .. versionchanged:: 0.11 + Added `**kwargs` to support passing additional keyword arguments to + the constructor of :attr:`test_client_class`. + """ + cls = self.test_client_class + if cls is None: + from .testing import FlaskClient as cls # type: ignore + return cls( # type: ignore + self, self.response_class, use_cookies=use_cookies, **kwargs + ) + + def test_cli_runner(self, **kwargs: t.Any) -> "FlaskCliRunner": + """Create a CLI runner for testing CLI commands. + See :ref:`testing-cli`. + + Returns an instance of :attr:`test_cli_runner_class`, by default + :class:`~flask.testing.FlaskCliRunner`. The Flask app object is + passed as the first argument. + + .. versionadded:: 1.0 + """ + cls = self.test_cli_runner_class + + if cls is None: + from .testing import FlaskCliRunner as cls # type: ignore + + return cls(self, **kwargs) # type: ignore + + @setupmethod + def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on the application. Keyword + arguments passed to this method will override the defaults set on the + blueprint. + + Calls the blueprint's :meth:`~flask.Blueprint.register` method after + recording the blueprint in the application's :attr:`blueprints`. + + :param blueprint: The blueprint to register. + :param url_prefix: Blueprint routes will be prefixed with this. + :param subdomain: Blueprint routes will match on this subdomain. + :param url_defaults: Blueprint routes will use these default values for + view arguments. + :param options: Additional keyword arguments are passed to + :class:`~flask.blueprints.BlueprintSetupState`. They can be + accessed in :meth:`~flask.Blueprint.record` callbacks. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionadded:: 0.7 + """ + blueprint.register(self, options) + + def iter_blueprints(self) -> t.ValuesView["Blueprint"]: + """Iterates over all blueprints by the order they were registered. + + .. versionadded:: 0.11 + """ + return self.blueprints.values() + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[ft.RouteCallable] = None, + provide_automatic_options: t.Optional[bool] = None, + **options: t.Any, + ) -> None: + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + options["endpoint"] = endpoint + methods = options.pop("methods", None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only ``GET`` as default. + if methods is None: + methods = getattr(view_func, "methods", None) or ("GET",) + if isinstance(methods, str): + raise TypeError( + "Allowed methods must be a list of strings, for" + ' example: @app.route(..., methods=["POST"])' + ) + methods = {item.upper() for item in methods} + + # Methods that should always be added + required_methods = set(getattr(view_func, "required_methods", ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. 
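# Illustrative usage sketch (not part of the vendored Flask source): the
# add_url_rule() logic above makes the explicit call below equivalent to
# decorating the view with @app.route(); methods default to ("GET",) and an
# OPTIONS handler is added automatically unless OPTIONS is listed explicitly.
# The endpoint and view names here are hypothetical.
from flask import Flask

app = Flask(__name__)

def create_item():
    return {"created": True}, 201

# Same registration that @app.route("/items", methods=["POST"]) would perform.
app.add_url_rule("/items", endpoint="create_item", view_func=create_item, methods=["POST"])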
+ if provide_automatic_options is None: + provide_automatic_options = getattr( + view_func, "provide_automatic_options", None + ) + + if provide_automatic_options is None: + if "OPTIONS" not in methods: + provide_automatic_options = True + required_methods.add("OPTIONS") + else: + provide_automatic_options = False + + # Add the required methods now. + methods |= required_methods + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options # type: ignore + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError( + "View function mapping is overwriting an existing" + f" endpoint function: {endpoint}" + ) + self.view_functions[endpoint] = view_func + + @setupmethod + def template_filter( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_filter], T_template_filter]: + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: T_template_filter) -> T_template_filter: + self.add_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_filter( + self, f: ft.TemplateFilterCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_test], T_template_test]: + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: T_template_test) -> T_template_test: + self.add_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_test( + self, f: ft.TemplateTestCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + self.jinja_env.tests[name or f.__name__] = f + + @setupmethod + def template_global( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_global], T_template_global]: + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. 
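# Minimal sketch of the template_filter() decorator documented above (not part
# of the vendored source): the filter becomes available in Jinja templates
# under its function name, or under the name passed to the decorator. The app
# and template string are made up for illustration.
from flask import Flask, render_template_string

app = Flask(__name__)

@app.template_filter()
def reverse(s):
    return s[::-1]

with app.app_context():
    # Renders "olleh"
    print(render_template_string("{{ 'hello' | reverse }}"))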
+ """ + + def decorator(f: T_template_global) -> T_template_global: + self.add_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_global( + self, f: ft.TemplateGlobalCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def before_first_request(self, f: T_before_first_request) -> T_before_first_request: + """Registers a function to be run before the first request to this + instance of the application. + + The function will be called without any arguments and its return + value is ignored. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Run setup code when creating + the application instead. + + .. versionadded:: 0.8 + """ + import warnings + + warnings.warn( + "'before_first_request' is deprecated and will be removed" + " in Flask 2.3. Run setup code while creating the" + " application instead.", + DeprecationWarning, + stacklevel=2, + ) + self.before_first_request_funcs.append(f) + return f + + @setupmethod + def teardown_appcontext(self, f: T_teardown) -> T_teardown: + """Registers a function to be called when the application + context is popped. The application context is typically popped + after the request context for each request, at the end of CLI + commands, or after a manually pushed context ends. + + .. code-block:: python + + with app.app_context(): + ... + + When the ``with`` block exits (or ``ctx.pop()`` is called), the + teardown functions are called just before the app context is + made inactive. Since a request context typically also manages an + application context it would also be called when you pop a + request context. + + When a teardown function was called because of an unhandled + exception it will be passed an error object. If an + :meth:`errorhandler` is registered, it will handle the exception + and the teardown will not receive it. + + Teardown functions must avoid raising exceptions. If they + execute code that might fail they must surround that code with a + ``try``/``except`` block and log any errors. + + The return values of teardown functions are ignored. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def shell_context_processor( + self, f: T_shell_context_processor + ) -> T_shell_context_processor: + """Registers a shell context processor function. + + .. versionadded:: 0.11 + """ + self.shell_context_processors.append(f) + return f + + def _find_error_handler(self, e: Exception) -> t.Optional[ft.ErrorHandlerCallable]: + """Return a registered error handler for an exception in this order: + blueprint handler for a specific code, app handler for a specific code, + blueprint handler for an exception class, app handler for an exception + class, or ``None`` if a suitable handler is not found. 
+ """ + exc_class, code = self._get_exc_class_and_code(type(e)) + names = (*request.blueprints, None) + + for c in (code, None) if code is not None else (None,): + for name in names: + handler_map = self.error_handler_spec[name][c] + + if not handler_map: + continue + + for cls in exc_class.__mro__: + handler = handler_map.get(cls) + + if handler is not None: + return handler + return None + + def handle_http_exception( + self, e: HTTPException + ) -> t.Union[HTTPException, ft.ResponseReturnValue]: + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionchanged:: 1.0.3 + ``RoutingException``, used internally for actions such as + slash redirects during routing, is not passed to error + handlers. + + .. versionchanged:: 1.0 + Exceptions are looked up by code *and* by MRO, so + ``HTTPException`` subclasses can be handled with a catch-all + handler for the base ``HTTPException``. + + .. versionadded:: 0.3 + """ + # Proxy exceptions don't have error codes. We want to always return + # those unchanged as errors + if e.code is None: + return e + + # RoutingExceptions are used internally to trigger routing + # actions, such as slash redirects raising RequestRedirect. They + # are not raised or handled in user code. + if isinstance(e, RoutingException): + return e + + handler = self._find_error_handler(e) + if handler is None: + return e + return self.ensure_sync(handler)(e) + + def trap_http_exception(self, e: Exception) -> bool: + """Checks if an HTTP exception should be trapped or not. By default + this will return ``False`` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It + also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. + + This is called for all HTTP exceptions raised by a view function. + If it returns ``True`` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionchanged:: 1.0 + Bad request errors are not trapped by default in debug mode. + + .. versionadded:: 0.8 + """ + if self.config["TRAP_HTTP_EXCEPTIONS"]: + return True + + trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] + + # if unset, trap key errors in debug mode + if ( + trap_bad_request is None + and self.debug + and isinstance(e, BadRequestKeyError) + ): + return True + + if trap_bad_request: + return isinstance(e, BadRequest) + + return False + + def handle_user_exception( + self, e: Exception + ) -> t.Union[HTTPException, ft.ResponseReturnValue]: + """This method is called whenever an exception occurs that + should be handled. A special case is :class:`~werkzeug + .exceptions.HTTPException` which is forwarded to the + :meth:`handle_http_exception` method. This function will either + return a response value or reraise the exception with the same + traceback. + + .. versionchanged:: 1.0 + Key errors raised from request data like ``form`` show the + bad key in debug mode rather than a generic bad request + message. + + .. 
versionadded:: 0.7 + """ + if isinstance(e, BadRequestKeyError) and ( + self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] + ): + e.show_exception = True + + if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + handler = self._find_error_handler(e) + + if handler is None: + raise + + return self.ensure_sync(handler)(e) + + def handle_exception(self, e: Exception) -> Response: + """Handle an exception that did not have an error handler + associated with it, or that was raised from an error handler. + This always causes a 500 ``InternalServerError``. + + Always sends the :data:`got_request_exception` signal. + + If :attr:`propagate_exceptions` is ``True``, such as in debug + mode, the error will be re-raised so that the debugger can + display it. Otherwise, the original exception is logged, and + an :exc:`~werkzeug.exceptions.InternalServerError` is returned. + + If an error handler is registered for ``InternalServerError`` or + ``500``, it will be used. For consistency, the handler will + always receive the ``InternalServerError``. The original + unhandled exception is available as ``e.original_exception``. + + .. versionchanged:: 1.1.0 + Always passes the ``InternalServerError`` instance to the + handler, setting ``original_exception`` to the unhandled + error. + + .. versionchanged:: 1.1.0 + ``after_request`` functions and other finalization is done + even for the default 500 response when there is no handler. + + .. versionadded:: 0.3 + """ + exc_info = sys.exc_info() + got_request_exception.send(self, exception=e) + propagate = self.config["PROPAGATE_EXCEPTIONS"] + + if propagate is None: + propagate = self.testing or self.debug + + if propagate: + # Re-raise if called with an active exception, otherwise + # raise the passed in exception. + if exc_info[1] is e: + raise + + raise e + + self.log_exception(exc_info) + server_error: t.Union[InternalServerError, ft.ResponseReturnValue] + server_error = InternalServerError(original_exception=e) + handler = self._find_error_handler(server_error) + + if handler is not None: + server_error = self.ensure_sync(handler)(server_error) + + return self.finalize_request(server_error, from_error_handler=True) + + def log_exception( + self, + exc_info: t.Union[ + t.Tuple[type, BaseException, TracebackType], t.Tuple[None, None, None] + ], + ) -> None: + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. versionadded:: 0.8 + """ + self.logger.error( + f"Exception on {request.path} [{request.method}]", exc_info=exc_info + ) + + def raise_routing_exception(self, request: Request) -> "te.NoReturn": + """Intercept routing exceptions and possibly do something else. + + In debug mode, intercept a routing redirect and replace it with + an error if the body will be discarded. + + With modern Werkzeug this shouldn't occur, since it now uses a + 308 status which tells the browser to resend the method and + body. + + .. versionchanged:: 2.1 + Don't intercept 307 and 308 redirects. 
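# Sketch of the handler contract described in handle_exception() above (not
# part of the vendored source): a handler registered for InternalServerError
# always receives the InternalServerError instance, with the original
# unhandled error available as e.original_exception. The app, route-free
# handler, and log message are illustrative.
from flask import Flask
from werkzeug.exceptions import InternalServerError

app = Flask(__name__)

@app.errorhandler(InternalServerError)
def handle_500(e):
    original = getattr(e, "original_exception", None)
    app.logger.error("Unhandled exception: %r", original)
    return {"error": "internal server error"}, 500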
+ + :meta private: + :internal: + """ + if ( + not self.debug + or not isinstance(request.routing_exception, RequestRedirect) + or request.routing_exception.code in {307, 308} + or request.method in {"GET", "HEAD", "OPTIONS"} + ): + raise request.routing_exception # type: ignore + + from .debughelpers import FormDataRoutingRedirect + + raise FormDataRoutingRedirect(request) + + def dispatch_request(self) -> ft.ResponseReturnValue: + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. + """ + req = request_ctx.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule: Rule = req.url_rule # type: ignore[assignment] + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if ( + getattr(rule, "provide_automatic_options", False) + and req.method == "OPTIONS" + ): + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + view_args: t.Dict[str, t.Any] = req.view_args # type: ignore[assignment] + return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) + + def full_dispatch_request(self) -> Response: + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. versionadded:: 0.7 + """ + # Run before_first_request functions if this is the thread's first request. + # Inlined to avoid a method call on subsequent requests. + # This is deprecated, will be removed in Flask 2.3. + if not self._got_first_request: + with self._before_request_lock: + if not self._got_first_request: + for func in self.before_first_request_funcs: + self.ensure_sync(func)() + + self._got_first_request = True + + try: + request_started.send(self) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + return self.finalize_request(rv) + + def finalize_request( + self, + rv: t.Union[ft.ResponseReturnValue, HTTPException], + from_error_handler: bool = False, + ) -> Response: + """Given the return value from a view function this finalizes + the request by converting it into a response and invoking the + postprocessing functions. This is invoked for both normal + request dispatching as well as error handlers. + + Because this means that it might be called as a result of a + failure a special safe mode is available which can be enabled + with the `from_error_handler` flag. If enabled, failures in + response processing will be logged and otherwise ignored. + + :internal: + """ + response = self.make_response(rv) + try: + response = self.process_response(response) + request_finished.send(self, response=response) + except Exception: + if not from_error_handler: + raise + self.logger.exception( + "Request finalizing failed with an error while handling an error" + ) + return response + + def make_default_options_response(self) -> Response: + """This method is called to create the default ``OPTIONS`` response. + This can be changed through subclassing to change the default + behavior of ``OPTIONS`` responses. + + .. 
versionadded:: 0.7 + """ + adapter = request_ctx.url_adapter + methods = adapter.allowed_methods() # type: ignore[union-attr] + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error: t.Optional[BaseException]) -> bool: + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns ``True`` then the teardown handlers will not be + passed the error. + + .. versionadded:: 0.10 + """ + return False + + def ensure_sync(self, func: t.Callable) -> t.Callable: + """Ensure that the function is synchronous for WSGI workers. + Plain ``def`` functions are returned as-is. ``async def`` + functions are wrapped to run and wait for the response. + + Override this method to change how the app runs async views. + + .. versionadded:: 2.0 + """ + if iscoroutinefunction(func): + return self.async_to_sync(func) + + return func + + def async_to_sync( + self, func: t.Callable[..., t.Coroutine] + ) -> t.Callable[..., t.Any]: + """Return a sync function that will run the coroutine function. + + .. code-block:: python + + result = app.async_to_sync(func)(*args, **kwargs) + + Override this method to change how the app converts async code + to be synchronously callable. + + .. versionadded:: 2.0 + """ + try: + from asgiref.sync import async_to_sync as asgiref_async_to_sync + except ImportError: + raise RuntimeError( + "Install Flask with the 'async' extra in order to use async views." + ) from None + + return asgiref_async_to_sync(func) + + def url_for( + self, + endpoint: str, + *, + _anchor: t.Optional[str] = None, + _method: t.Optional[str] = None, + _scheme: t.Optional[str] = None, + _external: t.Optional[bool] = None, + **values: t.Any, + ) -> str: + """Generate a URL to the given endpoint with the given values. + + This is called by :func:`flask.url_for`, and can be called + directly as well. + + An *endpoint* is the name of a URL rule, usually added with + :meth:`@app.route() `, and usually the same name as the + view function. A route defined in a :class:`~flask.Blueprint` + will prepend the blueprint's name separated by a ``.`` to the + endpoint. + + In some cases, such as email messages, you want URLs to include + the scheme and domain, like ``https://example.com/hello``. When + not in an active request, URLs will be external by default, but + this requires setting :data:`SERVER_NAME` so Flask knows what + domain to use. :data:`APPLICATION_ROOT` and + :data:`PREFERRED_URL_SCHEME` should also be configured as + needed. This config is only used when not in an active request. + + Functions can be decorated with :meth:`url_defaults` to modify + keyword arguments before the URL is built. + + If building fails for some reason, such as an unknown endpoint + or incorrect values, the app's :meth:`handle_url_build_error` + method is called. If that returns a string, that is returned, + otherwise a :exc:`~werkzeug.routing.BuildError` is raised. + + :param endpoint: The endpoint name associated with the URL to + generate. If this starts with a ``.``, the current blueprint + name (if any) will be used. + :param _anchor: If given, append this as ``#anchor`` to the URL. + :param _method: If given, generate the URL associated with this + method for the endpoint. + :param _scheme: If given, the URL will have this scheme if it + is external. + :param _external: If given, prefer the URL to be internal + (False) or require it to be external (True). External URLs + include the scheme and domain. 
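# Sketch of what ensure_sync()/async_to_sync() above enable (not part of the
# vendored source): an ``async def`` view is wrapped so it can run under a
# synchronous WSGI worker. This requires Flask installed with the ``async``
# extra (asgiref). The route and the awaited sleep are illustrative.
import asyncio

from flask import Flask

app = Flask(__name__)

@app.route("/slow")
async def slow():
    await asyncio.sleep(0.1)
    return {"done": True}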
When not in an active + request, URLs are external by default. + :param values: Values to use for the variable parts of the URL + rule. Unknown keys are appended as query string arguments, + like ``?a=b&c=d``. + + .. versionadded:: 2.2 + Moved from ``flask.url_for``, which calls this method. + """ + req_ctx = _cv_request.get(None) + + if req_ctx is not None: + url_adapter = req_ctx.url_adapter + blueprint_name = req_ctx.request.blueprint + + # If the endpoint starts with "." and the request matches a + # blueprint, the endpoint is relative to the blueprint. + if endpoint[:1] == ".": + if blueprint_name is not None: + endpoint = f"{blueprint_name}{endpoint}" + else: + endpoint = endpoint[1:] + + # When in a request, generate a URL without scheme and + # domain by default, unless a scheme is given. + if _external is None: + _external = _scheme is not None + else: + app_ctx = _cv_app.get(None) + + # If called by helpers.url_for, an app context is active, + # use its url_adapter. Otherwise, app.url_for was called + # directly, build an adapter. + if app_ctx is not None: + url_adapter = app_ctx.url_adapter + else: + url_adapter = self.create_url_adapter(None) + + if url_adapter is None: + raise RuntimeError( + "Unable to build URLs outside an active request" + " without 'SERVER_NAME' configured. Also configure" + " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" + " needed." + ) + + # When outside a request, generate a URL with scheme and + # domain by default. + if _external is None: + _external = True + + # It is an error to set _scheme when _external=False, in order + # to avoid accidental insecure URLs. + if _scheme is not None and not _external: + raise ValueError("When specifying '_scheme', '_external' must be True.") + + self.inject_url_defaults(endpoint, values) + + try: + rv = url_adapter.build( # type: ignore[union-attr] + endpoint, + values, + method=_method, + url_scheme=_scheme, + force_external=_external, + ) + except BuildError as error: + values.update( + _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external + ) + return self.handle_url_build_error(error, endpoint, values) + + if _anchor is not None: + rv = f"{rv}#{url_quote(_anchor)}" + + return rv + + def redirect(self, location: str, code: int = 302) -> BaseResponse: + """Create a redirect response object. + + This is called by :func:`flask.redirect`, and can be called + directly as well. + + :param location: The URL to redirect to. + :param code: The status code for the redirect. + + .. versionadded:: 2.2 + Moved from ``flask.redirect``, which calls this method. + """ + return _wz_redirect(location, code=code, Response=self.response_class) + + def make_response(self, rv: ft.ResponseReturnValue) -> Response: + """Convert the return value from a view function to an instance of + :attr:`response_class`. + + :param rv: the return value from the view function. The view function + must return a response. Returning ``None``, or the view ending + without returning, is not allowed. The following types are allowed + for ``view_rv``: + + ``str`` + A response object is created with the string encoded to UTF-8 + as the body. + + ``bytes`` + A response object is created with the bytes as the body. + + ``dict`` + A dictionary that will be jsonify'd before being returned. + + ``list`` + A list that will be jsonify'd before being returned. + + ``generator`` or ``iterator`` + A generator that returns ``str`` or ``bytes`` to be + streamed as the response. 
+ + ``tuple`` + Either ``(body, status, headers)``, ``(body, status)``, or + ``(body, headers)``, where ``body`` is any of the other types + allowed here, ``status`` is a string or an integer, and + ``headers`` is a dictionary or a list of ``(key, value)`` + tuples. If ``body`` is a :attr:`response_class` instance, + ``status`` overwrites the exiting value and ``headers`` are + extended. + + :attr:`response_class` + The object is returned unchanged. + + other :class:`~werkzeug.wrappers.Response` class + The object is coerced to :attr:`response_class`. + + :func:`callable` + The function is called as a WSGI application. The result is + used to create a response object. + + .. versionchanged:: 2.2 + A generator will be converted to a streaming response. + A list will be converted to a JSON response. + + .. versionchanged:: 1.1 + A dict will be converted to a JSON response. + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. + """ + + status = headers = None + + # unpack tuple returns + if isinstance(rv, tuple): + len_rv = len(rv) + + # a 3-tuple is unpacked directly + if len_rv == 3: + rv, status, headers = rv # type: ignore[misc] + # decide if a 2-tuple has status or headers + elif len_rv == 2: + if isinstance(rv[1], (Headers, dict, tuple, list)): + rv, headers = rv + else: + rv, status = rv # type: ignore[assignment,misc] + # other sized tuples are not allowed + else: + raise TypeError( + "The view function did not return a valid response tuple." + " The tuple must have the form (body, status, headers)," + " (body, status), or (body, headers)." + ) + + # the body must not be None + if rv is None: + raise TypeError( + f"The view function for {request.endpoint!r} did not" + " return a valid response. The function either returned" + " None or ended without a return statement." + ) + + # make sure the body is an instance of the response class + if not isinstance(rv, self.response_class): + if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, _abc_Iterator): + # let the response class set the status and headers instead of + # waiting to do it manually, so that the class can handle any + # special logic + rv = self.response_class( + rv, + status=status, + headers=headers, # type: ignore[arg-type] + ) + status = headers = None + elif isinstance(rv, (dict, list)): + rv = self.json.response(rv) + elif isinstance(rv, BaseResponse) or callable(rv): + # evaluate a WSGI callable, or coerce a different response + # class to the correct type + try: + rv = self.response_class.force_type( + rv, request.environ # type: ignore[arg-type] + ) + except TypeError as e: + raise TypeError( + f"{e}\nThe view function did not return a valid" + " response. The return type must be a string," + " dict, list, tuple with headers or status," + " Response instance, or WSGI callable, but it" + f" was a {type(rv).__name__}." + ).with_traceback(sys.exc_info()[2]) from None + else: + raise TypeError( + "The view function did not return a valid" + " response. The return type must be a string," + " dict, list, tuple with headers or status," + " Response instance, or WSGI callable, but it was a" + f" {type(rv).__name__}." 
+ ) + + rv = t.cast(Response, rv) + # prefer the status if it was provided + if status is not None: + if isinstance(status, (str, bytes, bytearray)): + rv.status = status + else: + rv.status_code = status + + # extend existing headers with provided headers + if headers: + rv.headers.update(headers) # type: ignore[arg-type] + + return rv + + def create_url_adapter( + self, request: t.Optional[Request] + ) -> t.Optional[MapAdapter]: + """Creates a URL adapter for the given request. The URL adapter + is created at a point where the request context is not yet set + up so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + + .. versionchanged:: 1.0 + :data:`SERVER_NAME` no longer implicitly enables subdomain + matching. Use :attr:`subdomain_matching` instead. + """ + if request is not None: + # If subdomain matching is disabled (the default), use the + # default subdomain in all cases. This should be the default + # in Werkzeug but it currently does not have that feature. + if not self.subdomain_matching: + subdomain = self.url_map.default_subdomain or None + else: + subdomain = None + + return self.url_map.bind_to_environ( + request.environ, + server_name=self.config["SERVER_NAME"], + subdomain=subdomain, + ) + # We need at the very least the server name to be set for this + # to work. + if self.config["SERVER_NAME"] is not None: + return self.url_map.bind( + self.config["SERVER_NAME"], + script_name=self.config["APPLICATION_ROOT"], + url_scheme=self.config["PREFERRED_URL_SCHEME"], + ) + + return None + + def inject_url_defaults(self, endpoint: str, values: dict) -> None: + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. versionadded:: 0.7 + """ + names: t.Iterable[t.Optional[str]] = (None,) + + # url_for may be called outside a request context, parse the + # passed endpoint instead of using request.blueprints. + if "." in endpoint: + names = chain( + names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) + ) + + for name in names: + if name in self.url_default_functions: + for func in self.url_default_functions[name]: + func(endpoint, values) + + def handle_url_build_error( + self, error: BuildError, endpoint: str, values: t.Dict[str, t.Any] + ) -> str: + """Called by :meth:`.url_for` if a + :exc:`~werkzeug.routing.BuildError` was raised. If this returns + a value, it will be returned by ``url_for``, otherwise the error + will be re-raised. + + Each function in :attr:`url_build_error_handlers` is called with + ``error``, ``endpoint`` and ``values``. If a function returns + ``None`` or raises a ``BuildError``, it is skipped. Otherwise, + its return value is returned by ``url_for``. + + :param error: The active ``BuildError`` being handled. + :param endpoint: The endpoint being built. + :param values: The keyword arguments passed to ``url_for``. + """ + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + except BuildError as e: + # make error available outside except block + error = e + else: + if rv is not None: + return rv + + # Re-raise if called with an active exception, otherwise raise + # the passed in exception. 
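# Sketch of the return-value conversions handled by make_response() above (not
# part of the vendored source): strings, dicts and lists (serialized to JSON),
# and (body, status, headers) tuples are all converted to response_class
# instances. The routes are hypothetical.
from flask import Flask

app = Flask(__name__)

@app.route("/text")
def text():
    return "plain body"  # str -> response body

@app.route("/data")
def data():
    return {"status": "ok"}  # dict -> JSON response

@app.route("/created")
def created():
    return "created", 201, {"X-Example": "1"}  # (body, status, headers) tuple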
+ if error is sys.exc_info()[1]: + raise + + raise error + + def preprocess_request(self) -> t.Optional[ft.ResponseReturnValue]: + """Called before the request is dispatched. Calls + :attr:`url_value_preprocessors` registered with the app and the + current blueprint (if any). Then calls :attr:`before_request_funcs` + registered with the app and the blueprint. + + If any :meth:`before_request` handler returns a non-None value, the + value is handled as if it was the return value from the view, and + further request handling is stopped. + """ + names = (None, *reversed(request.blueprints)) + + for name in names: + if name in self.url_value_preprocessors: + for url_func in self.url_value_preprocessors[name]: + url_func(request.endpoint, request.view_args) + + for name in names: + if name in self.before_request_funcs: + for before_func in self.before_request_funcs[name]: + rv = self.ensure_sync(before_func)() + + if rv is not None: + return rv + + return None + + def process_response(self, response: Response) -> Response: + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. + """ + ctx = request_ctx._get_current_object() # type: ignore[attr-defined] + + for func in ctx._after_request_functions: + response = self.ensure_sync(func)(response) + + for name in chain(request.blueprints, (None,)): + if name in self.after_request_funcs: + for func in reversed(self.after_request_funcs[name]): + response = self.ensure_sync(func)(response) + + if not self.session_interface.is_null_session(ctx.session): + self.session_interface.save_session(self, ctx.session, response) + + return response + + def do_teardown_request( + self, exc: t.Optional[BaseException] = _sentinel # type: ignore + ) -> None: + """Called after the request is dispatched and the response is + returned, right before the request context is popped. + + This calls all functions decorated with + :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` + if a blueprint handled the request. Finally, the + :data:`request_tearing_down` signal is sent. + + This is called by + :meth:`RequestContext.pop() `, + which may be delayed during testing to maintain access to + resources. + + :param exc: An unhandled exception raised while dispatching the + request. Detected from the current exception information if + not passed. Passed to each teardown function. + + .. versionchanged:: 0.9 + Added the ``exc`` argument. + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for name in chain(request.blueprints, (None,)): + if name in self.teardown_request_funcs: + for func in reversed(self.teardown_request_funcs[name]): + self.ensure_sync(func)(exc) + + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext( + self, exc: t.Optional[BaseException] = _sentinel # type: ignore + ) -> None: + """Called right before the application context is popped. + + When handling a request, the application context is popped + after the request context. See :meth:`do_teardown_request`. + + This calls all functions decorated with + :meth:`teardown_appcontext`. Then the + :data:`appcontext_tearing_down` signal is sent. 
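# Sketch of the hooks driven by preprocess_request()/process_response() above
# (not part of the vendored source): a before_request function can
# short-circuit dispatch by returning a value, and after_request functions run
# in reverse registration order on the outgoing response. The header names and
# key value are made up.
from flask import Flask, request

app = Flask(__name__)

@app.before_request
def require_api_key():
    if request.headers.get("X-Api-Key") != "hypothetical-key":
        return {"error": "unauthorized"}, 401  # handled as the view's return value

@app.after_request
def add_header(response):
    response.headers["X-Processed"] = "1"
    return response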
+ + This is called by + :meth:`AppContext.pop() `. + + .. versionadded:: 0.9 + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for func in reversed(self.teardown_appcontext_funcs): + self.ensure_sync(func)(exc) + + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self) -> AppContext: + """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` + block to push the context, which will make :data:`current_app` + point at this application. + + An application context is automatically pushed by + :meth:`RequestContext.push() ` + when handling a request, and when running a CLI command. Use + this to manually create a context outside of these situations. + + :: + + with app.app_context(): + init_db() + + See :doc:`/appcontext`. + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ: dict) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` representing a + WSGI environment. Use a ``with`` block to push the context, + which will make :data:`request` point at this request. + + See :doc:`/reqcontext`. + + Typically you should not call this from your own code. A request + context is automatically pushed by the :meth:`wsgi_app` when + handling a request. Use :meth:`test_request_context` to create + an environment and context instead of this method. + + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` for a WSGI + environment created from the given values. This is mostly useful + during testing, where you may want to run a function that uses + request data without dispatching a full request. + + See :doc:`/reqcontext`. + + Use a ``with`` block to push the context, which will make + :data:`request` point at the request for the created + environment. :: + + with test_request_context(...): + generate_report() + + When using the shell, it may be easier to push and pop the + context manually to avoid indentation. :: + + ctx = app.test_request_context(...) + ctx.push() + ... + ctx.pop() + + Takes the same arguments as Werkzeug's + :class:`~werkzeug.test.EnvironBuilder`, with some defaults from + the application. See the linked Werkzeug docs for most of the + available arguments. Flask-specific behavior is listed here. + + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to + :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`. + :param data: The request body, either as a string or a dict of + form keys and values. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + from .testing import EnvironBuilder + + builder = EnvironBuilder(self, *args, **kwargs) + + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ: dict, start_response: t.Callable) -> t.Any: + """The actual WSGI application. 
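# Sketch of test_request_context() as documented above (not part of the
# vendored source): it builds a WSGI environment and pushes a request context
# so request data can be used without dispatching a full request. The path and
# JSON payload are made up.
from flask import Flask, request

app = Flask(__name__)

with app.test_request_context("/hello", method="POST", json={"name": "flask"}):
    assert request.method == "POST"
    assert request.get_json()["name"] == "flask"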
This is not implemented in + :meth:`__call__` so that middlewares can be applied without + losing a reference to the app object. Instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + Teardown events for the request and app contexts are called + even if an unhandled error occurs. Other events may not be + called depending on when an error occurs during dispatch. + See :ref:`callbacks-and-errors`. + + :param environ: A WSGI environment. + :param start_response: A callable accepting a status code, + a list of headers, and an optional exception context to + start the response. + """ + ctx = self.request_context(environ) + error: t.Optional[BaseException] = None + try: + try: + ctx.push() + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.handle_exception(e) + except: # noqa: B001 + error = sys.exc_info()[1] + raise + return response(environ, start_response) + finally: + if "werkzeug.debug.preserve_context" in environ: + environ["werkzeug.debug.preserve_context"](_cv_app.get()) + environ["werkzeug.debug.preserve_context"](_cv_request.get()) + + if error is not None and self.should_ignore_error(error): + error = None + + ctx.pop(error) + + def __call__(self, environ: dict, start_response: t.Callable) -> t.Any: + """The WSGI server calls the Flask application object as the + WSGI application. This calls :meth:`wsgi_app`, which can be + wrapped to apply middleware. + """ + return self.wsgi_app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/flask/blueprints.py b/venv/lib/python3.8/site-packages/flask/blueprints.py new file mode 100644 index 0000000..104f8ac --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/blueprints.py @@ -0,0 +1,706 @@ +import json +import os +import typing as t +from collections import defaultdict +from functools import update_wrapper + +from . import typing as ft +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import Scaffold +from .scaffold import setupmethod + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + +DeferredSetupFunction = t.Callable[["BlueprintSetupState"], t.Callable] +T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable) +T_before_first_request = t.TypeVar( + "T_before_first_request", bound=ft.BeforeFirstRequestCallable +) +T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) +T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_context_processor = t.TypeVar( + "T_template_context_processor", bound=ft.TemplateContextProcessorCallable +) +T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) +T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) +T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) +T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) +T_url_value_preprocessor = t.TypeVar( + "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable +) + + +class BlueprintSetupState: + """Temporary holder object for registering a blueprint with the + application. 
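# Sketch of the middleware pattern recommended in the wsgi_app() docstring
# above (not part of the vendored source): wrap app.wsgi_app rather than the
# app object itself so the Flask instance keeps its methods. ProxyFix is a
# real Werkzeug middleware; the argument values are illustrative.
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1)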
An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__( + self, + blueprint: "Blueprint", + app: "Flask", + options: t.Any, + first_registration: bool, + ) -> None: + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. + self.first_registration = first_registration + + subdomain = self.options.get("subdomain") + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, ``None`` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get("url_prefix") + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + self.name = self.options.get("name", blueprint.name) + self.name_prefix = self.options.get("name_prefix", "") + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get("url_defaults", ())) + + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[t.Callable] = None, + **options: t.Any, + ) -> None: + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. + """ + if self.url_prefix is not None: + if rule: + rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) + else: + rule = self.url_prefix + options.setdefault("subdomain", self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + defaults = self.url_defaults + if "defaults" in options: + defaults = dict(defaults, **options.pop("defaults")) + + self.app.add_url_rule( + rule, + f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), + view_func, + defaults=defaults, + **options, + ) + + +class Blueprint(Scaffold): + """Represents a blueprint, a collection of routes and other + app-related functions that can be registered on a real application + later. + + A blueprint is an object that allows defining application functions + without requiring an application object ahead of time. It uses the + same decorators as :class:`~flask.Flask`, but defers the need for an + application by recording them for later registration. + + Decorating a function with a blueprint creates a deferred function + that is called with :class:`~flask.blueprints.BlueprintSetupState` + when the blueprint is registered on an application. + + See :doc:`/blueprints` for more information. + + :param name: The name of the blueprint. Will be prepended to each + endpoint name. + :param import_name: The name of the blueprint package, usually + ``__name__``. This helps locate the ``root_path`` for the + blueprint. 
+ :param static_folder: A folder with static files that should be + served by the blueprint's static route. The path is relative to + the blueprint's root path. Blueprint static files are disabled + by default. + :param static_url_path: The url to serve static files from. + Defaults to ``static_folder``. If the blueprint does not have + a ``url_prefix``, the app's static route will take precedence, + and the blueprint's static files won't be accessible. + :param template_folder: A folder with templates that should be added + to the app's template search path. The path is relative to the + blueprint's root path. Blueprint templates are disabled by + default. Blueprint templates have a lower precedence than those + in the app's templates folder. + :param url_prefix: A path to prepend to all of the blueprint's URLs, + to make them distinct from the rest of the app's routes. + :param subdomain: A subdomain that blueprint routes will match on by + default. + :param url_defaults: A dict of default values that blueprint routes + will receive by default. + :param root_path: By default, the blueprint will automatically set + this based on ``import_name``. In certain situations this + automatic detection can fail, so the path can be specified + manually instead. + + .. versionchanged:: 1.1.0 + Blueprints have a ``cli`` group to register nested CLI commands. + The ``cli_group`` parameter controls the name of the group under + the ``flask`` command. + + .. versionadded:: 0.7 + """ + + _got_registered_once = False + + _json_encoder: t.Union[t.Type[json.JSONEncoder], None] = None + _json_decoder: t.Union[t.Type[json.JSONDecoder], None] = None + + @property # type: ignore[override] + def json_encoder( # type: ignore[override] + self, + ) -> t.Union[t.Type[json.JSONEncoder], None]: + """Blueprint-local JSON encoder class to use. Set to ``None`` to use the app's. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'bp.json_encoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._json_encoder + + @json_encoder.setter + def json_encoder(self, value: t.Union[t.Type[json.JSONEncoder], None]) -> None: + import warnings + + warnings.warn( + "'bp.json_encoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_encoder = value + + @property # type: ignore[override] + def json_decoder( # type: ignore[override] + self, + ) -> t.Union[t.Type[json.JSONDecoder], None]: + """Blueprint-local JSON decoder class to use. Set to ``None`` to use the app's. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'bp.json_decoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._json_decoder + + @json_decoder.setter + def json_decoder(self, value: t.Union[t.Type[json.JSONDecoder], None]) -> None: + import warnings + + warnings.warn( + "'bp.json_decoder' is deprecated and will be removed in Flask 2.3." 
+ " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_decoder = value + + def __init__( + self, + name: str, + import_name: str, + static_folder: t.Optional[t.Union[str, os.PathLike]] = None, + static_url_path: t.Optional[str] = None, + template_folder: t.Optional[str] = None, + url_prefix: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + url_defaults: t.Optional[dict] = None, + root_path: t.Optional[str] = None, + cli_group: t.Optional[str] = _sentinel, # type: ignore + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if "." in name: + raise ValueError("'name' may not contain a dot '.' character.") + + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.deferred_functions: t.List[DeferredSetupFunction] = [] + + if url_defaults is None: + url_defaults = {} + + self.url_values_defaults = url_defaults + self.cli_group = cli_group + self._blueprints: t.List[t.Tuple["Blueprint", dict]] = [] + + def _check_setup_finished(self, f_name: str) -> None: + if self._got_registered_once: + import warnings + + warnings.warn( + f"The setup method '{f_name}' can no longer be called on" + f" the blueprint '{self.name}'. It has already been" + " registered at least once, any changes will not be" + " applied consistently.\n" + "Make sure all imports, decorators, functions, etc." + " needed to set up the blueprint are done before" + " registering it.\n" + "This warning will become an exception in Flask 2.3.", + UserWarning, + stacklevel=3, + ) + + @setupmethod + def record(self, func: t.Callable) -> None: + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + self.deferred_functions.append(func) + + @setupmethod + def record_once(self, func: t.Callable) -> None: + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + + def wrapper(state: BlueprintSetupState) -> None: + if state.first_registration: + func(state) + + self.record(update_wrapper(wrapper, func)) + + def make_setup_state( + self, app: "Flask", options: dict, first_registration: bool = False + ) -> BlueprintSetupState: + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + @setupmethod + def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on this blueprint. Keyword + arguments passed to this method will override the defaults set + on the blueprint. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. 
versionadded:: 2.0 + """ + if blueprint is self: + raise ValueError("Cannot register a blueprint on itself") + self._blueprints.append((blueprint, options)) + + def register(self, app: "Flask", options: dict) -> None: + """Called by :meth:`Flask.register_blueprint` to register all + views and callbacks registered on the blueprint with the + application. Creates a :class:`.BlueprintSetupState` and calls + each :meth:`record` callback with it. + + :param app: The application this blueprint is being registered + with. + :param options: Keyword arguments forwarded from + :meth:`~Flask.register_blueprint`. + + .. versionchanged:: 2.0.1 + Nested blueprints are registered with their dotted name. + This allows different blueprints with the same name to be + nested at different locations. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionchanged:: 2.0.1 + Registering the same blueprint with the same name multiple + times is deprecated and will become an error in Flask 2.1. + """ + name_prefix = options.get("name_prefix", "") + self_name = options.get("name", self.name) + name = f"{name_prefix}.{self_name}".lstrip(".") + + if name in app.blueprints: + bp_desc = "this" if app.blueprints[name] is self else "a different" + existing_at = f" '{name}'" if self_name != name else "" + + raise ValueError( + f"The name '{self_name}' is already registered for" + f" {bp_desc} blueprint{existing_at}. Use 'name=' to" + f" provide a unique name." + ) + + first_bp_registration = not any(bp is self for bp in app.blueprints.values()) + first_name_registration = name not in app.blueprints + + app.blueprints[name] = self + self._got_registered_once = True + state = self.make_setup_state(app, options, first_bp_registration) + + if self.has_static_folder: + state.add_url_rule( + f"{self.static_url_path}/<path:filename>", + view_func=self.send_static_file, + endpoint="static", + ) + + # Merge blueprint data into parent.
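+ # (Editor's illustration, not upstream Flask code.) ``name`` already carries + # any parent prefix at this point, so with a hypothetical nested setup such as + #     api = Blueprint("api", __name__, url_prefix="/api") + #     admin = Blueprint("admin", __name__, url_prefix="/admin") + #     api.register_blueprint(admin) + #     app.register_blueprint(api) + # a view function ``list_users`` routed with ``@admin.route("/users")`` is + # exposed as the endpoint "api.admin.list_users" at the URL "/api/admin/users".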
+ if first_bp_registration or first_name_registration: + + def extend(bp_dict, parent_dict): + for key, values in bp_dict.items(): + key = name if key is None else f"{name}.{key}" + parent_dict[key].extend(values) + + for key, value in self.error_handler_spec.items(): + key = name if key is None else f"{name}.{key}" + value = defaultdict( + dict, + { + code: { + exc_class: func for exc_class, func in code_values.items() + } + for code, code_values in value.items() + }, + ) + app.error_handler_spec[key] = value + + for endpoint, func in self.view_functions.items(): + app.view_functions[endpoint] = func + + extend(self.before_request_funcs, app.before_request_funcs) + extend(self.after_request_funcs, app.after_request_funcs) + extend( + self.teardown_request_funcs, + app.teardown_request_funcs, + ) + extend(self.url_default_functions, app.url_default_functions) + extend(self.url_value_preprocessors, app.url_value_preprocessors) + extend(self.template_context_processors, app.template_context_processors) + + for deferred in self.deferred_functions: + deferred(state) + + cli_resolved_group = options.get("cli_group", self.cli_group) + + if self.cli.commands: + if cli_resolved_group is None: + app.cli.commands.update(self.cli.commands) + elif cli_resolved_group is _sentinel: + self.cli.name = name + app.cli.add_command(self.cli) + else: + self.cli.name = cli_resolved_group + app.cli.add_command(self.cli) + + for blueprint, bp_options in self._blueprints: + bp_options = bp_options.copy() + bp_url_prefix = bp_options.get("url_prefix") + + if bp_url_prefix is None: + bp_url_prefix = blueprint.url_prefix + + if state.url_prefix is not None and bp_url_prefix is not None: + bp_options["url_prefix"] = ( + state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") + ) + elif bp_url_prefix is not None: + bp_options["url_prefix"] = bp_url_prefix + elif state.url_prefix is not None: + bp_options["url_prefix"] = state.url_prefix + + bp_options["name_prefix"] = name + blueprint.register(app, bp_options) + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[ft.RouteCallable] = None, + provide_automatic_options: t.Optional[bool] = None, + **options: t.Any, + ) -> None: + """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for + the :func:`url_for` function is prefixed with the name of the blueprint. + """ + if endpoint and "." in endpoint: + raise ValueError("'endpoint' may not contain a dot '.' character.") + + if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: + raise ValueError("'view_func' name may not contain a dot '.' character.") + + self.record( + lambda s: s.add_url_rule( + rule, + endpoint, + view_func, + provide_automatic_options=provide_automatic_options, + **options, + ) + ) + + @setupmethod + def app_template_filter( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_filter], T_template_filter]: + """Register a custom template filter, available application wide. Like + :meth:`Flask.template_filter` but for a blueprint. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: T_template_filter) -> T_template_filter: + self.add_app_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_filter( + self, f: ft.TemplateFilterCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template filter, available application wide. 
Like + :meth:`Flask.add_template_filter` but for a blueprint. Works exactly + like the :meth:`app_template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.filters[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def app_template_test( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_test], T_template_test]: + """Register a custom template test, available application wide. Like + :meth:`Flask.template_test` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: T_template_test) -> T_template_test: + self.add_app_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_test( + self, f: ft.TemplateTestCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template test, available application wide. Like + :meth:`Flask.add_template_test` but for a blueprint. Works exactly + like the :meth:`app_template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.tests[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def app_template_global( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_global], T_template_global]: + """Register a custom template global, available application wide. Like + :meth:`Flask.template_global` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def decorator(f: T_template_global) -> T_template_global: + self.add_app_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_global( + self, f: ft.TemplateGlobalCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template global, available application wide. Like + :meth:`Flask.add_template_global` but for a blueprint. Works exactly + like the :meth:`app_template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.globals[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def before_app_request(self, f: T_before_request) -> T_before_request: + """Like :meth:`Flask.before_request`. Such a function is executed + before each request, even if outside of a blueprint. + """ + self.record_once( + lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def before_app_first_request( + self, f: T_before_first_request + ) -> T_before_first_request: + """Like :meth:`Flask.before_first_request`. Such a function is + executed before the first request to the application. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Run setup code when creating + the application instead. + """ + import warnings + + warnings.warn( + "'before_app_first_request' is deprecated and will be" + " removed in Flask 2.3. 
Use 'record_once' instead to run" + " setup code when registering the blueprint.", + DeprecationWarning, + stacklevel=2, + ) + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + @setupmethod + def after_app_request(self, f: T_after_request) -> T_after_request: + """Like :meth:`Flask.after_request` but for a blueprint. Such a function + is executed after each request, even if outside of the blueprint. + """ + self.record_once( + lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def teardown_app_request(self, f: T_teardown) -> T_teardown: + """Like :meth:`Flask.teardown_request` but for a blueprint. Such a + function is executed when tearing down each request, even if outside of + the blueprint. + """ + self.record_once( + lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_context_processor( + self, f: T_template_context_processor + ) -> T_template_context_processor: + """Like :meth:`Flask.context_processor` but for a blueprint. Such a + function is executed each request, even if outside of the blueprint. + """ + self.record_once( + lambda s: s.app.template_context_processors.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_errorhandler( + self, code: t.Union[t.Type[Exception], int] + ) -> t.Callable[[T_error_handler], T_error_handler]: + """Like :meth:`Flask.errorhandler` but for a blueprint. This + handler is used for all requests, even if outside of the blueprint. + """ + + def decorator(f: T_error_handler) -> T_error_handler: + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + + return decorator + + @setupmethod + def app_url_value_preprocessor( + self, f: T_url_value_preprocessor + ) -> T_url_value_preprocessor: + """Same as :meth:`url_value_preprocessor` but application wide.""" + self.record_once( + lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: + """Same as :meth:`url_defaults` but application wide.""" + self.record_once( + lambda s: s.app.url_default_functions.setdefault(None, []).append(f) + ) + return f diff --git a/venv/lib/python3.8/site-packages/flask/cli.py b/venv/lib/python3.8/site-packages/flask/cli.py new file mode 100644 index 0000000..82fe819 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/cli.py @@ -0,0 +1,1051 @@ +from __future__ import annotations + +import ast +import inspect +import os +import platform +import re +import sys +import traceback +import typing as t +from functools import update_wrapper +from operator import attrgetter + +import click +from click.core import ParameterSource +from werkzeug import run_simple +from werkzeug.serving import is_running_from_reloader +from werkzeug.utils import import_string + +from .globals import current_app +from .helpers import get_debug_flag +from .helpers import get_load_dotenv + +if t.TYPE_CHECKING: + from .app import Flask + + +class NoAppException(click.UsageError): + """Raised if an application cannot be found or loaded.""" + + +def find_best_app(module): + """Given a module instance this tries to find the best possible + application in the module or raises an exception. + """ + from . import Flask + + # Search for the most common names first. 
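+ # (Editor's illustration, not upstream Flask code.) For ``flask --app hello run`` + # or ``FLASK_APP=hello``, resolution order is: a module-level ``app`` or + # ``application`` Flask instance, then the single Flask instance found in the + # module, then a ``create_app()`` / ``make_app()`` factory called without + # arguments. A hypothetical ``hello.py`` that needs no extra flags: + #     from flask import Flask + #     def create_app(): + #         return Flask(__name__)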
+ for attr_name in ("app", "application"): + app = getattr(module, attr_name, None) + + if isinstance(app, Flask): + return app + + # Otherwise find the only object that is a Flask instance. + matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] + + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise NoAppException( + "Detected multiple Flask applications in module" + f" '{module.__name__}'. Use '{module.__name__}:name'" + " to specify the correct one." + ) + + # Search for app factory functions. + for attr_name in ("create_app", "make_app"): + app_factory = getattr(module, attr_name, None) + + if inspect.isfunction(app_factory): + try: + app = app_factory() + + if isinstance(app, Flask): + return app + except TypeError as e: + if not _called_with_wrong_args(app_factory): + raise + + raise NoAppException( + f"Detected factory '{attr_name}' in module '{module.__name__}'," + " but could not call it without arguments. Use" + f" '{module.__name__}:{attr_name}(args)'" + " to specify arguments." + ) from e + + raise NoAppException( + "Failed to find Flask application or factory in module" + f" '{module.__name__}'. Use '{module.__name__}:name'" + " to specify one." + ) + + +def _called_with_wrong_args(f): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the factory raised the + error. + + :param f: The function that was called. + :return: ``True`` if the call failed. + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is f.__code__: + # In the function, it was called successfully. + return False + + tb = tb.tb_next + + # Didn't reach the function. + return True + finally: + # Delete tb to break a circular reference. + # https://docs.python.org/2/library/sys.html#sys.exc_info + del tb + + +def find_app_by_string(module, app_name): + """Check if the given string is a variable name or a function. Call + a function to get the app instance, or return the variable directly. + """ + from . import Flask + + # Parse app_name as a single expression to determine if it's a valid + # attribute name or function call. + try: + expr = ast.parse(app_name.strip(), mode="eval").body + except SyntaxError: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) from None + + if isinstance(expr, ast.Name): + name = expr.id + args = [] + kwargs = {} + elif isinstance(expr, ast.Call): + # Ensure the function name is an attribute name only. + if not isinstance(expr.func, ast.Name): + raise NoAppException( + f"Function reference must be a simple name: {app_name!r}." + ) + + name = expr.func.id + + # Parse the positional and keyword arguments as literals. + try: + args = [ast.literal_eval(arg) for arg in expr.args] + kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords} + except ValueError: + # literal_eval gives cryptic error messages, show a generic + # message with the full expression instead. + raise NoAppException( + f"Failed to parse arguments as literal values: {app_name!r}." + ) from None + else: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) + + try: + attr = getattr(module, name) + except AttributeError as e: + raise NoAppException( + f"Failed to find attribute {name!r} in {module.__name__!r}." + ) from e + + # If the attribute is a function, call it with any args and kwargs + # to get the real application. 
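+ # (Editor's illustration, not upstream Flask code.) This branch is what makes a + # spec like ``--app 'hello:create_app("dev", debug=True)'`` work: the arguments + # were parsed above with ast.literal_eval, so only literal values (strings, + # numbers, tuples, ...) can be passed to the factory here.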
+ if inspect.isfunction(attr): + try: + app = attr(*args, **kwargs) + except TypeError as e: + if not _called_with_wrong_args(attr): + raise + + raise NoAppException( + f"The factory {app_name!r} in module" + f" {module.__name__!r} could not be called with the" + " specified arguments." + ) from e + else: + app = attr + + if isinstance(app, Flask): + return app + + raise NoAppException( + "A valid Flask application was not obtained from" + f" '{module.__name__}:{app_name}'." + ) + + +def prepare_import(path): + """Given a filename this will try to calculate the python path, add it + to the search path and return the actual module name that is expected. + """ + path = os.path.realpath(path) + + fname, ext = os.path.splitext(path) + if ext == ".py": + path = fname + + if os.path.basename(path) == "__init__": + path = os.path.dirname(path) + + module_name = [] + + # move up until outside package structure (no __init__.py) + while True: + path, name = os.path.split(path) + module_name.append(name) + + if not os.path.exists(os.path.join(path, "__init__.py")): + break + + if sys.path[0] != path: + sys.path.insert(0, path) + + return ".".join(module_name[::-1]) + + +def locate_app(module_name, app_name, raise_if_not_found=True): + try: + __import__(module_name) + except ImportError: + # Reraise the ImportError if it occurred within the imported module. + # Determine this by checking whether the trace has a depth > 1. + if sys.exc_info()[2].tb_next: + raise NoAppException( + f"While importing {module_name!r}, an ImportError was" + f" raised:\n\n{traceback.format_exc()}" + ) from None + elif raise_if_not_found: + raise NoAppException(f"Could not import {module_name!r}.") from None + else: + return + + module = sys.modules[module_name] + + if app_name is None: + return find_best_app(module) + else: + return find_app_by_string(module, app_name) + + +def get_version(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + import werkzeug + from . import __version__ + + click.echo( + f"Python {platform.python_version()}\n" + f"Flask {__version__}\n" + f"Werkzeug {werkzeug.__version__}", + color=ctx.color, + ) + ctx.exit() + + +version_option = click.Option( + ["--version"], + help="Show the Flask version.", + expose_value=False, + callback=get_version, + is_flag=True, + is_eager=True, +) + + +class ScriptInfo: + """Helper object to deal with Flask applications. This is usually not + necessary to interface with as it's used internally in the dispatching + to click. In future versions of Flask this object will most likely play + a bigger role. Typically it's created automatically by the + :class:`FlaskGroup` but you can also manually create it and pass it + onwards as click object. + """ + + def __init__( + self, + app_import_path: str | None = None, + create_app: t.Callable[..., Flask] | None = None, + set_debug_flag: bool = True, + ) -> None: + #: Optionally the import path for the Flask application. + self.app_import_path = app_import_path + #: Optionally a function that is passed the script info to create + #: the instance of the application. + self.create_app = create_app + #: A dictionary with arbitrary data that can be associated with + #: this script info. + self.data: t.Dict[t.Any, t.Any] = {} + self.set_debug_flag = set_debug_flag + self._loaded_app: Flask | None = None + + def load_app(self) -> Flask: + """Loads the Flask app (if not yet loaded) and returns it. Calling + this multiple times will just result in the already loaded app to + be returned. 
+ """ + if self._loaded_app is not None: + return self._loaded_app + + if self.create_app is not None: + app = self.create_app() + else: + if self.app_import_path: + path, name = ( + re.split(r":(?![\\/])", self.app_import_path, 1) + [None] + )[:2] + import_name = prepare_import(path) + app = locate_app(import_name, name) + else: + for path in ("wsgi.py", "app.py"): + import_name = prepare_import(path) + app = locate_app(import_name, None, raise_if_not_found=False) + + if app: + break + + if not app: + raise NoAppException( + "Could not locate a Flask application. Use the" + " 'flask --app' option, 'FLASK_APP' environment" + " variable, or a 'wsgi.py' or 'app.py' file in the" + " current directory." + ) + + if self.set_debug_flag: + # Update the app's debug flag through the descriptor so that + # other values repopulate as well. + app.debug = get_debug_flag() + + self._loaded_app = app + return app + + +pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) + + +def with_appcontext(f): + """Wraps a callback so that it's guaranteed to be executed with the + script's application context. + + Custom commands (and their options) registered under ``app.cli`` or + ``blueprint.cli`` will always have an app context available, this + decorator is not required in that case. + + .. versionchanged:: 2.2 + The app context is active for subcommands as well as the + decorated callback. The app context is always available to + ``app.cli`` command and parameter callbacks. + """ + + @click.pass_context + def decorator(__ctx, *args, **kwargs): + if not current_app: + app = __ctx.ensure_object(ScriptInfo).load_app() + __ctx.with_resource(app.app_context()) + + return __ctx.invoke(f, *args, **kwargs) + + return update_wrapper(decorator, f) + + +class AppGroup(click.Group): + """This works similar to a regular click :class:`~click.Group` but it + changes the behavior of the :meth:`command` decorator so that it + automatically wraps the functions in :func:`with_appcontext`. + + Not to be confused with :class:`FlaskGroup`. + """ + + def command(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` + unless it's disabled by passing ``with_appcontext=False``. + """ + wrap_for_ctx = kwargs.pop("with_appcontext", True) + + def decorator(f): + if wrap_for_ctx: + f = with_appcontext(f) + return click.Group.command(self, *args, **kwargs)(f) + + return decorator + + def group(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it defaults the group class to + :class:`AppGroup`. + """ + kwargs.setdefault("cls", AppGroup) + return click.Group.group(self, *args, **kwargs) + + +def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: + if value is None: + return None + + info = ctx.ensure_object(ScriptInfo) + info.app_import_path = value + return value + + +# This option is eager so the app will be available if --help is given. +# --help is also eager, so --app must be before it in the param list. +# no_args_is_help bypasses eager processing, so this option must be +# processed manually in that case to ensure FLASK_APP gets picked up. +_app_option = click.Option( + ["-A", "--app"], + metavar="IMPORT", + help=( + "The Flask application or factory function to load, in the form 'module:name'." + " Module can be a dotted import or file path. 
Name is not required if it is" + " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" + " pass arguments." + ), + is_eager=True, + expose_value=False, + callback=_set_app, +) + + +def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: + # If the flag isn't provided, it will default to False. Don't use + # that, let debug be set by env in that case. + source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] + + if source is not None and source in ( + ParameterSource.DEFAULT, + ParameterSource.DEFAULT_MAP, + ): + return None + + # Set with env var instead of ScriptInfo.load so that it can be + # accessed early during a factory function. + os.environ["FLASK_DEBUG"] = "1" if value else "0" + return value + + +_debug_option = click.Option( + ["--debug/--no-debug"], + help="Set debug mode.", + expose_value=False, + callback=_set_debug, +) + + +def _env_file_callback( + ctx: click.Context, param: click.Option, value: str | None +) -> str | None: + if value is None: + return None + + import importlib + + try: + importlib.import_module("dotenv") + except ImportError: + raise click.BadParameter( + "python-dotenv must be installed to load an env file.", + ctx=ctx, + param=param, + ) from None + + # Don't check FLASK_SKIP_DOTENV, that only disables automatically + # loading .env and .flaskenv files. + load_dotenv(value) + return value + + +# This option is eager so env vars are loaded as early as possible to be +# used by other options. +_env_file_option = click.Option( + ["-e", "--env-file"], + type=click.Path(exists=True, dir_okay=False), + help="Load environment variables from this file. python-dotenv must be installed.", + is_eager=True, + expose_value=False, + callback=_env_file_callback, +) + + +class FlaskGroup(AppGroup): + """Special subclass of the :class:`AppGroup` group that supports + loading more commands from the configured Flask app. Normally a + developer does not have to interface with this class but there are + some very advanced use cases for which it makes sense to create an + instance of this. see :ref:`custom-scripts`. + + :param add_default_commands: if this is True then the default run and + shell commands will be added. + :param add_version_option: adds the ``--version`` option. + :param create_app: an optional callback that is passed the script info and + returns the loaded app. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param set_debug_flag: Set the app's debug flag. + + .. versionchanged:: 2.2 + Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. + + .. versionchanged:: 2.2 + An app context is pushed when running ``app.cli`` commands, so + ``@with_appcontext`` is no longer required for those commands. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment variables + from :file:`.env` and :file:`.flaskenv` files. + """ + + def __init__( + self, + add_default_commands: bool = True, + create_app: t.Callable[..., Flask] | None = None, + add_version_option: bool = True, + load_dotenv: bool = True, + set_debug_flag: bool = True, + **extra: t.Any, + ) -> None: + params = list(extra.pop("params", None) or ()) + # Processing is done with option callbacks instead of a group + # callback. This allows users to make a custom group callback + # without losing the behavior. 
--env-file must come first so + # that it is eagerly evaluated before --app. + params.extend((_env_file_option, _app_option, _debug_option)) + + if add_version_option: + params.append(version_option) + + if "context_settings" not in extra: + extra["context_settings"] = {} + + extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") + + super().__init__(params=params, **extra) + + self.create_app = create_app + self.load_dotenv = load_dotenv + self.set_debug_flag = set_debug_flag + + if add_default_commands: + self.add_command(run_command) + self.add_command(shell_command) + self.add_command(routes_command) + + self._loaded_plugin_commands = False + + def _load_plugin_commands(self): + if self._loaded_plugin_commands: + return + + if sys.version_info >= (3, 10): + from importlib import metadata + else: + # Use a backport on Python < 3.10. We technically have + # importlib.metadata on 3.8+, but the API changed in 3.10, + # so use the backport for consistency. + import importlib_metadata as metadata + + for ep in metadata.entry_points(group="flask.commands"): + self.add_command(ep.load(), ep.name) + + self._loaded_plugin_commands = True + + def get_command(self, ctx, name): + self._load_plugin_commands() + # Look up built-in and plugin commands, which should be + # available even if the app fails to load. + rv = super().get_command(ctx, name) + + if rv is not None: + return rv + + info = ctx.ensure_object(ScriptInfo) + + # Look up commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + app = info.load_app() + except NoAppException as e: + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + return None + + # Push an app context for the loaded app unless it is already + # active somehow. This makes the context available to parameter + # and command callbacks without needing @with_appcontext. + if not current_app or current_app._get_current_object() is not app: + ctx.with_resource(app.app_context()) + + return app.cli.get_command(ctx, name) + + def list_commands(self, ctx): + self._load_plugin_commands() + # Start with the built-in and plugin commands. + rv = set(super().list_commands(ctx)) + info = ctx.ensure_object(ScriptInfo) + + # Add commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + rv.update(info.load_app().cli.list_commands(ctx)) + except NoAppException as e: + # When an app couldn't be loaded, show the error message + # without the traceback. + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + except Exception: + # When any other errors occurred during loading, show the + # full traceback. + click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") + + return sorted(rv) + + def make_context( + self, + info_name: str | None, + args: list[str], + parent: click.Context | None = None, + **extra: t.Any, + ) -> click.Context: + # Set a flag to tell app.run to become a no-op. If app.run was + # not in a __name__ == __main__ guard, it would start the server + # when importing, blocking whatever command is being called. + os.environ["FLASK_RUN_FROM_CLI"] = "true" + + # Attempt to load .env and .flask env files. The --env-file + # option can cause another file to be loaded. 
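+ # (Editor's illustration, not upstream Flask code.) load_dotenv() is called with + # python-dotenv's default ``override`` behaviour, so variables that are already + # set win; the effective precedence is real environment > .env > .flaskenv. A + # hypothetical .flaskenv with project defaults might contain: + #     FLASK_APP=wsgi.py + #     FLASK_DEBUG=1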
+ if get_load_dotenv(self.load_dotenv): + load_dotenv() + + if "obj" not in extra and "obj" not in self.context_settings: + extra["obj"] = ScriptInfo( + create_app=self.create_app, set_debug_flag=self.set_debug_flag + ) + + return super().make_context(info_name, args, parent=parent, **extra) + + def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help: + # Attempt to load --env-file and --app early in case they + # were given as env vars. Otherwise no_args_is_help will not + # see commands from app.cli. + _env_file_option.handle_parse_result(ctx, {}, []) + _app_option.handle_parse_result(ctx, {}, []) + + return super().parse_args(ctx, args) + + +def _path_is_ancestor(path, other): + """Take ``other`` and remove the length of ``path`` from it. Then join it + to ``path``. If it is the original value, ``path`` is an ancestor of + ``other``.""" + return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other + + +def load_dotenv(path: str | os.PathLike | None = None) -> bool: + """Load "dotenv" files in order of precedence to set environment variables. + + If an env var is already set it is not overwritten, so earlier files in the + list are preferred over later files. + + This is a no-op if `python-dotenv`_ is not installed. + + .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme + + :param path: Load the file at this location instead of searching. + :return: ``True`` if a file was loaded. + + .. versionchanged:: 2.0 + The current directory is not changed to the location of the + loaded file. + + .. versionchanged:: 2.0 + When loading the env files, set the default encoding to UTF-8. + + .. versionchanged:: 1.1.0 + Returns ``False`` when python-dotenv is not installed, or when + the given path isn't a file. + + .. versionadded:: 1.0 + """ + try: + import dotenv + except ImportError: + if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): + click.secho( + " * Tip: There are .env or .flaskenv files present." + ' Do "pip install python-dotenv" to use them.', + fg="yellow", + err=True, + ) + + return False + + # Always return after attempting to load a given path, don't load + # the default files. + if path is not None: + if os.path.isfile(path): + return dotenv.load_dotenv(path, encoding="utf-8") + + return False + + loaded = False + + for name in (".env", ".flaskenv"): + path = dotenv.find_dotenv(name, usecwd=True) + + if not path: + continue + + dotenv.load_dotenv(path, encoding="utf-8") + loaded = True + + return loaded # True if at least one file was located and loaded. + + +def show_server_banner(debug, app_import_path): + """Show extra startup messages the first time the server is run, + ignoring the reloader. + """ + if is_running_from_reloader(): + return + + if app_import_path is not None: + click.echo(f" * Serving Flask app '{app_import_path}'") + + if debug is not None: + click.echo(f" * Debug mode: {'on' if debug else 'off'}") + + +class CertParamType(click.ParamType): + """Click option type for the ``--cert`` option. Allows either an + existing file, the string ``'adhoc'``, or an import for a + :class:`~ssl.SSLContext` object. 
+ """ + + name = "path" + + def __init__(self): + self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) + + def convert(self, value, param, ctx): + try: + import ssl + except ImportError: + raise click.BadParameter( + 'Using "--cert" requires Python to be compiled with SSL support.', + ctx, + param, + ) from None + + try: + return self.path_type(value, param, ctx) + except click.BadParameter: + value = click.STRING(value, param, ctx).lower() + + if value == "adhoc": + try: + import cryptography # noqa: F401 + except ImportError: + raise click.BadParameter( + "Using ad-hoc certificates requires the cryptography library.", + ctx, + param, + ) from None + + return value + + obj = import_string(value, silent=True) + + if isinstance(obj, ssl.SSLContext): + return obj + + raise + + +def _validate_key(ctx, param, value): + """The ``--key`` option must be specified when ``--cert`` is a file. + Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. + """ + cert = ctx.params.get("cert") + is_adhoc = cert == "adhoc" + + try: + import ssl + except ImportError: + is_context = False + else: + is_context = isinstance(cert, ssl.SSLContext) + + if value is not None: + if is_adhoc: + raise click.BadParameter( + 'When "--cert" is "adhoc", "--key" is not used.', ctx, param + ) + + if is_context: + raise click.BadParameter( + 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param + ) + + if not cert: + raise click.BadParameter('"--cert" must also be specified.', ctx, param) + + ctx.params["cert"] = cert, value + + else: + if cert and not (is_adhoc or is_context): + raise click.BadParameter('Required when using "--cert".', ctx, param) + + return value + + +class SeparatedPathType(click.Path): + """Click option type that accepts a list of values separated by the + OS's path separator (``:``, ``;`` on Windows). Each value is + validated as a :class:`click.Path` type. + """ + + def convert(self, value, param, ctx): + items = self.split_envvar_value(value) + super_convert = super().convert + return [super_convert(item, param, ctx) for item in items] + + +@click.command("run", short_help="Run a development server.") +@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") +@click.option("--port", "-p", default=5000, help="The port to bind to.") +@click.option( + "--cert", + type=CertParamType(), + help="Specify a certificate file to use HTTPS.", + is_eager=True, +) +@click.option( + "--key", + type=click.Path(exists=True, dir_okay=False, resolve_path=True), + callback=_validate_key, + expose_value=False, + help="The key file to use when specifying a certificate.", +) +@click.option( + "--reload/--no-reload", + default=None, + help="Enable or disable the reloader. By default the reloader " + "is active if debug is enabled.", +) +@click.option( + "--debugger/--no-debugger", + default=None, + help="Enable or disable the debugger. By default the debugger " + "is active if debug is enabled.", +) +@click.option( + "--with-threads/--without-threads", + default=True, + help="Enable or disable multithreading.", +) +@click.option( + "--extra-files", + default=None, + type=SeparatedPathType(), + help=( + "Extra files that trigger a reload on change. Multiple paths" + f" are separated by {os.path.pathsep!r}." + ), +) +@click.option( + "--exclude-patterns", + default=None, + type=SeparatedPathType(), + help=( + "Files matching these fnmatch patterns will not trigger a reload" + " on change. 
Multiple patterns are separated by" + f" {os.path.pathsep!r}." + ), +) +@pass_script_info +def run_command( + info, + host, + port, + reload, + debugger, + with_threads, + cert, + extra_files, + exclude_patterns, +): + """Run a local development server. + + This server is for development purposes only. It does not provide + the stability, security, or performance of production WSGI servers. + + The reloader and debugger are enabled by default with the '--debug' + option. + """ + try: + app = info.load_app() + except Exception as e: + if is_running_from_reloader(): + # When reloading, print out the error immediately, but raise + # it later so the debugger or server can handle it. + traceback.print_exc() + err = e + + def app(environ, start_response): + raise err from None + + else: + # When not reloading, raise the error immediately so the + # command fails. + raise e from None + + debug = get_debug_flag() + + if reload is None: + reload = debug + + if debugger is None: + debugger = debug + + show_server_banner(debug, info.app_import_path) + + run_simple( + host, + port, + app, + use_reloader=reload, + use_debugger=debugger, + threaded=with_threads, + ssl_context=cert, + extra_files=extra_files, + exclude_patterns=exclude_patterns, + ) + + +@click.command("shell", short_help="Run a shell in the app context.") +@with_appcontext +def shell_command() -> None: + """Run an interactive Python shell in the context of a given + Flask application. The application will populate the default + namespace of this shell according to its configuration. + + This is useful for executing small snippets of management code + without having to manually configure the application. + """ + import code + + banner = ( + f"Python {sys.version} on {sys.platform}\n" + f"App: {current_app.import_name}\n" + f"Instance: {current_app.instance_path}" + ) + ctx: dict = {} + + # Support the regular Python interpreter startup script if someone + # is using it. + startup = os.environ.get("PYTHONSTARTUP") + if startup and os.path.isfile(startup): + with open(startup) as f: + eval(compile(f.read(), startup, "exec"), ctx) + + ctx.update(current_app.make_shell_context()) + + # Site, customize, or startup script can set a hook to call when + # entering interactive mode. The default one sets up readline with + # tab and history completion. + interactive_hook = getattr(sys, "__interactivehook__", None) + + if interactive_hook is not None: + try: + import readline + from rlcompleter import Completer + except ImportError: + pass + else: + # rlcompleter uses __main__.__dict__ by default, which is + # flask.__main__. Use the shell context instead. + readline.set_completer(Completer(ctx).complete) + + interactive_hook() + + code.interact(banner=banner, local=ctx) + + +@click.command("routes", short_help="Show the routes for the app.") +@click.option( + "--sort", + "-s", + type=click.Choice(("endpoint", "methods", "rule", "match")), + default="endpoint", + help=( + 'Method to sort routes by. "match" is the order that Flask will match ' + "routes when dispatching a request." 
+ ), +) +@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") +@with_appcontext +def routes_command(sort: str, all_methods: bool) -> None: + """Show all registered routes with endpoints and methods.""" + + rules = list(current_app.url_map.iter_rules()) + if not rules: + click.echo("No routes were registered.") + return + + ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS")) + + if sort in ("endpoint", "rule"): + rules = sorted(rules, key=attrgetter(sort)) + elif sort == "methods": + rules = sorted(rules, key=lambda rule: sorted(rule.methods)) # type: ignore + + rule_methods = [ + ", ".join(sorted(rule.methods - ignored_methods)) # type: ignore + for rule in rules + ] + + headers = ("Endpoint", "Methods", "Rule") + widths = ( + max(len(rule.endpoint) for rule in rules), + max(len(methods) for methods in rule_methods), + max(len(rule.rule) for rule in rules), + ) + widths = [max(len(h), w) for h, w in zip(headers, widths)] + row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths) + + click.echo(row.format(*headers).strip()) + click.echo(row.format(*("-" * width for width in widths))) + + for rule, methods in zip(rules, rule_methods): + click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) + + +cli = FlaskGroup( + name="flask", + help="""\ +A general utility script for Flask applications. + +An application to load must be given with the '--app' option, +'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file +in the current directory. +""", +) + + +def main() -> None: + cli.main() + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.8/site-packages/flask/config.py b/venv/lib/python3.8/site-packages/flask/config.py new file mode 120000 index 0000000..1f81a0b --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/config.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/21/6a/87/79c1f8a680f1344520e14fd903509394be412990d7de87c6e14198ca2d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/flask/ctx.py b/venv/lib/python3.8/site-packages/flask/ctx.py new file mode 100644 index 0000000..ca28449 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/ctx.py @@ -0,0 +1,438 @@ +import contextvars +import sys +import typing as t +from functools import update_wrapper +from types import TracebackType + +from werkzeug.exceptions import HTTPException + +from . import typing as ft +from .globals import _cv_app +from .globals import _cv_request +from .signals import appcontext_popped +from .signals import appcontext_pushed + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .sessions import SessionMixin + from .wrappers import Request + + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class _AppCtxGlobals: + """A plain object. Used as a namespace for storing data during an + application context. + + Creating an app context automatically creates this object, which is + made available as the :data:`g` proxy. + + .. describe:: 'key' in g + + Check whether an attribute is present. + + .. versionadded:: 0.10 + + .. describe:: iter(g) + + Return an iterator over the attribute names. + + .. versionadded:: 0.10 + """ + + # Define attr methods to let mypy know this is a namespace object + # that has arbitrary attributes. 
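+ # (Editor's illustration, not upstream Flask code.) Typical use of the ``g`` + # proxy backed by this class: stash data for the lifetime of one app context + # and read it back with the dict-like helpers defined below. + #     from flask import g + #     g.db = connect_db()    # hypothetical helper + #     db = g.get("db")       # None if it was never set + #     g.pop("db", None)      # e.g. in a teardown callback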
+ + def __getattr__(self, name: str) -> t.Any: + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def __setattr__(self, name: str, value: t.Any) -> None: + self.__dict__[name] = value + + def __delattr__(self, name: str) -> None: + try: + del self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def get(self, name: str, default: t.Optional[t.Any] = None) -> t.Any: + """Get an attribute by name, or a default value. Like + :meth:`dict.get`. + + :param name: Name of attribute to get. + :param default: Value to return if the attribute is not present. + + .. versionadded:: 0.10 + """ + return self.__dict__.get(name, default) + + def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: + """Get and remove an attribute by name. Like :meth:`dict.pop`. + + :param name: Name of attribute to pop. + :param default: Value to return if the attribute is not present, + instead of raising a ``KeyError``. + + .. versionadded:: 0.11 + """ + if default is _sentinel: + return self.__dict__.pop(name) + else: + return self.__dict__.pop(name, default) + + def setdefault(self, name: str, default: t.Any = None) -> t.Any: + """Get the value of an attribute if it is present, otherwise + set and return a default value. Like :meth:`dict.setdefault`. + + :param name: Name of attribute to get. + :param default: Value to set and return if the attribute is not + present. + + .. versionadded:: 0.11 + """ + return self.__dict__.setdefault(name, default) + + def __contains__(self, item: str) -> bool: + return item in self.__dict__ + + def __iter__(self) -> t.Iterator[str]: + return iter(self.__dict__) + + def __repr__(self) -> str: + ctx = _cv_app.get(None) + if ctx is not None: + return f"<flask.g of '{ctx.app.name}'>" + return object.__repr__(self) + + +def after_this_request(f: ft.AfterRequestCallable) -> ft.AfterRequestCallable: + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + ctx = _cv_request.get(None) + + if ctx is None: + raise RuntimeError( + "'after_this_request' can only be used when a request" + " context is active, such as in a view function." + ) + + ctx._after_request_functions.append(f) + return f + + +def copy_current_request_context(f: t.Callable) -> t.Callable: + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. The current session is also + included in the copied request context. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request or + # flask.session like you would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + ..
versionadded:: 0.10 + """ + ctx = _cv_request.get(None) + + if ctx is None: + raise RuntimeError( + "'copy_current_request_context' can only be used when a" + " request context is active, such as in a view function." + ) + + ctx = ctx.copy() + + def wrapper(*args, **kwargs): + with ctx: + return ctx.app.ensure_sync(f)(*args, **kwargs) + + return update_wrapper(wrapper, f) + + +def has_request_context() -> bool: + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. + + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g`) for truthness:: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _cv_request.get(None) is not None + + +def has_app_context() -> bool: + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _cv_app.get(None) is not None + + +class AppContext: + """The app context contains application-specific information. An app + context is created and pushed at the beginning of each request if + one is not already active. An app context is also pushed when + running CLI commands. + """ + + def __init__(self, app: "Flask") -> None: + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g: _AppCtxGlobals = app.app_ctx_globals_class() + self._cv_tokens: t.List[contextvars.Token] = [] + + def push(self) -> None: + """Binds the app context to the current context.""" + self._cv_tokens.append(_cv_app.set(self)) + appcontext_pushed.send(self.app) + + def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore + """Pops the app context.""" + try: + if len(self._cv_tokens) == 1: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + finally: + ctx = _cv_app.get() + _cv_app.reset(self._cv_tokens.pop()) + + if ctx is not self: + raise AssertionError( + f"Popped wrong app context. ({ctx!r} instead of {self!r})" + ) + + appcontext_popped.send(self.app) + + def __enter__(self) -> "AppContext": + self.push() + return self + + def __exit__( + self, + exc_type: t.Optional[type], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.pop(exc_value) + + +class RequestContext: + """The request context contains per-request information. The Flask + app creates and pushes it at the beginning of the request, then pops + it at the end of the request. It will create the URL adapter and + request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). 
+ + The request context is automatically popped at the end of the + request. When using the interactive debugger, the context will be + restored so ``request`` is still accessible. Similarly, the test + client can preserve the context after the request ends. However, + teardown functions may already have closed some resources such as + database connections. + """ + + def __init__( + self, + app: "Flask", + environ: dict, + request: t.Optional["Request"] = None, + session: t.Optional["SessionMixin"] = None, + ) -> None: + self.app = app + if request is None: + request = app.request_class(environ) + request.json_module = app.json # type: ignore[misc] + self.request: Request = request + self.url_adapter = None + try: + self.url_adapter = app.create_url_adapter(self.request) + except HTTPException as e: + self.request.routing_exception = e + self.flashes: t.Optional[t.List[t.Tuple[str, str]]] = None + self.session: t.Optional["SessionMixin"] = session + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. + self._after_request_functions: t.List[ft.AfterRequestCallable] = [] + + self._cv_tokens: t.List[t.Tuple[contextvars.Token, t.Optional[AppContext]]] = [] + + def copy(self) -> "RequestContext": + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. + Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. versionadded:: 0.10 + + .. versionchanged:: 1.1 + The current session object is used instead of reloading the original + data. This prevents `flask.session` pointing to an out-of-date object. + """ + return self.__class__( + self.app, + environ=self.request.environ, + request=self.request, + session=self.session, + ) + + def match_request(self) -> None: + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + result = self.url_adapter.match(return_rule=True) # type: ignore + self.request.url_rule, self.request.view_args = result # type: ignore + except HTTPException as e: + self.request.routing_exception = e + + def push(self) -> None: + # Before we push the request context we have to ensure that there + # is an application context. + app_ctx = _cv_app.get(None) + + if app_ctx is None or app_ctx.app is not self.app: + app_ctx = self.app.app_context() + app_ctx.push() + else: + app_ctx = None + + self._cv_tokens.append((_cv_request.set(self), app_ctx)) + + # Open the session at the moment that the request context is available. + # This allows a custom open_session method to use the request context. + # Only open a new session if this is the first time the request was + # pushed, otherwise stream_with_context loses the session. + if self.session is None: + session_interface = self.app.session_interface + self.session = session_interface.open_session(self.app, self.request) + + if self.session is None: + self.session = session_interface.make_null_session(self.app) + + # Match the request URL after loading the session, so that the + # session is available in custom URL converters. + if self.url_adapter is not None: + self.match_request() + + def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore + """Pops the request context and unbinds it by doing that. 
This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + clear_request = len(self._cv_tokens) == 1 + + try: + if clear_request: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + request_close = getattr(self.request, "close", None) + if request_close is not None: + request_close() + finally: + ctx = _cv_request.get() + token, app_ctx = self._cv_tokens.pop() + _cv_request.reset(token) + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + ctx.request.environ["werkzeug.request"] = None + + if app_ctx is not None: + app_ctx.pop(exc) + + if ctx is not self: + raise AssertionError( + f"Popped wrong request context. ({ctx!r} instead of {self!r})" + ) + + def __enter__(self) -> "RequestContext": + self.push() + return self + + def __exit__( + self, + exc_type: t.Optional[type], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.pop(exc_value) + + def __repr__(self) -> str: + return ( + f"<{type(self).__name__} {self.request.url!r}" + f" [{self.request.method}] of {self.app.name}>" + ) diff --git a/venv/lib/python3.8/site-packages/flask/debughelpers.py b/venv/lib/python3.8/site-packages/flask/debughelpers.py new file mode 100644 index 0000000..b063989 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/debughelpers.py @@ -0,0 +1,158 @@ +import typing as t + +from .app import Flask +from .blueprints import Blueprint +from .globals import request_ctx + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = [ + f"You tried to access the file {key!r} in the request.files" + " dictionary but it does not exist. The mimetype for the" + f" request is {request.mimetype!r} instead of" + " 'multipart/form-data' which means that no file contents" + " were transmitted. To fix this error you should provide" + ' enctype="multipart/form-data" in your form.' + ] + if form_matches: + names = ", ".join(repr(x) for x in form_matches) + buf.append( + "\n\nThe browser instead transmitted some file names. " + f"This was submitted: {names}" + ) + self.msg = "".join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised in debug mode if a routing redirect + would cause the browser to drop the method or body. This happens + when method is not GET, HEAD or OPTIONS and the status code is not + 307 or 308. + """ + + def __init__(self, request): + exc = request.routing_exception + buf = [ + f"A request was sent to '{request.url}', but routing issued" + f" a redirect to the canonical URL '{exc.new_url}'." + ] + + if f"{request.base_url}/" == exc.new_url.partition("?")[0]: + buf.append( + " The URL was defined with a trailing slash. Flask" + " will redirect to the URL with a trailing slash if it" + " was accessed without one." + ) + + buf.append( + " Send requests to the canonical URL, or use 307 or 308 for" + " routing redirects. 
Otherwise, browsers will drop form" + " data.\n\n" + "This exception is only raised in debug mode." + ) + super().__init__("".join(buf)) + + +def attach_enctype_error_multidict(request): + """Patch ``request.files.__getitem__`` to raise a descriptive error + about ``enctype=multipart/form-data``. + + :param request: The request to patch. + :meta private: + """ + oldcls = request.files.__class__ + + class newcls(oldcls): + def __getitem__(self, key): + try: + return super().__getitem__(key) + except KeyError as e: + if key not in request.form: + raise + + raise DebugFilesKeyError(request, key).with_traceback( + e.__traceback__ + ) from None + + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls + + +def _dump_loader_info(loader) -> t.Generator: + yield f"class: {type(loader).__module__}.{type(loader).__name__}" + for key, value in sorted(loader.__dict__.items()): + if key.startswith("_"): + continue + if isinstance(value, (tuple, list)): + if not all(isinstance(x, str) for x in value): + continue + yield f"{key}:" + for item in value: + yield f" - {item}" + continue + elif not isinstance(value, (str, int, float, bool)): + continue + yield f"{key}: {value!r}" + + +def explain_template_loading_attempts(app: Flask, template, attempts) -> None: + """This should help developers understand what failed""" + info = [f"Locating template {template!r}:"] + total_found = 0 + blueprint = None + if request_ctx and request_ctx.request.blueprint is not None: + blueprint = request_ctx.request.blueprint + + for idx, (loader, srcobj, triple) in enumerate(attempts): + if isinstance(srcobj, Flask): + src_info = f"application {srcobj.import_name!r}" + elif isinstance(srcobj, Blueprint): + src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" + else: + src_info = repr(srcobj) + + info.append(f"{idx + 1:5}: trying loader of {src_info}") + + for line in _dump_loader_info(loader): + info.append(f" {line}") + + if triple is None: + detail = "no match" + else: + detail = f"found ({triple[1] or ''!r})" + total_found += 1 + info.append(f" -> {detail}") + + seems_fishy = False + if total_found == 0: + info.append("Error: the template could not be found.") + seems_fishy = True + elif total_found > 1: + info.append("Warning: multiple loaders returned a match for the template.") + seems_fishy = True + + if blueprint is not None and seems_fishy: + info.append( + " The template was looked up from an endpoint that belongs" + f" to the blueprint {blueprint!r}." 
+ ) + info.append(" Maybe you did not place a template in the right folder?") + info.append(" See https://flask.palletsprojects.com/blueprints/#templates") + + app.logger.info("\n".join(info)) diff --git a/venv/lib/python3.8/site-packages/flask/globals.py b/venv/lib/python3.8/site-packages/flask/globals.py new file mode 100644 index 0000000..b230ef7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/globals.py @@ -0,0 +1,107 @@ +import typing as t +from contextvars import ContextVar + +from werkzeug.local import LocalProxy + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .ctx import _AppCtxGlobals + from .ctx import AppContext + from .ctx import RequestContext + from .sessions import SessionMixin + from .wrappers import Request + + +class _FakeStack: + def __init__(self, name: str, cv: ContextVar[t.Any]) -> None: + self.name = name + self.cv = cv + + def _warn(self): + import warnings + + warnings.warn( + f"'_{self.name}_ctx_stack' is deprecated and will be" + " removed in Flask 2.3. Use 'g' to store data, or" + f" '{self.name}_ctx' to access the current context.", + DeprecationWarning, + stacklevel=3, + ) + + def push(self, obj: t.Any) -> None: + self._warn() + self.cv.set(obj) + + def pop(self) -> t.Any: + self._warn() + ctx = self.cv.get(None) + self.cv.set(None) + return ctx + + @property + def top(self) -> t.Optional[t.Any]: + self._warn() + return self.cv.get(None) + + +_no_app_msg = """\ +Working outside of application context. + +This typically means that you attempted to use functionality that needed +the current application. To solve this, set up an application context +with app.app_context(). See the documentation for more information.\ +""" +_cv_app: ContextVar["AppContext"] = ContextVar("flask.app_ctx") +__app_ctx_stack = _FakeStack("app", _cv_app) +app_ctx: "AppContext" = LocalProxy( # type: ignore[assignment] + _cv_app, unbound_message=_no_app_msg +) +current_app: "Flask" = LocalProxy( # type: ignore[assignment] + _cv_app, "app", unbound_message=_no_app_msg +) +g: "_AppCtxGlobals" = LocalProxy( # type: ignore[assignment] + _cv_app, "g", unbound_message=_no_app_msg +) + +_no_req_msg = """\ +Working outside of request context. + +This typically means that you attempted to use functionality that needed +an active HTTP request. 
Consult the documentation on testing for
+information about how to avoid this problem.\
+"""
+_cv_request: ContextVar["RequestContext"] = ContextVar("flask.request_ctx")
+__request_ctx_stack = _FakeStack("request", _cv_request)
+request_ctx: "RequestContext" = LocalProxy(  # type: ignore[assignment]
+    _cv_request, unbound_message=_no_req_msg
+)
+request: "Request" = LocalProxy(  # type: ignore[assignment]
+    _cv_request, "request", unbound_message=_no_req_msg
+)
+session: "SessionMixin" = LocalProxy(  # type: ignore[assignment]
+    _cv_request, "session", unbound_message=_no_req_msg
+)
+
+
+def __getattr__(name: str) -> t.Any:
+    if name == "_app_ctx_stack":
+        import warnings
+
+        warnings.warn(
+            "'_app_ctx_stack' is deprecated and will be removed in Flask 2.3.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return __app_ctx_stack
+
+    if name == "_request_ctx_stack":
+        import warnings
+
+        warnings.warn(
+            "'_request_ctx_stack' is deprecated and will be removed in Flask 2.3.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return __request_ctx_stack
+
+    raise AttributeError(name)
diff --git a/venv/lib/python3.8/site-packages/flask/helpers.py b/venv/lib/python3.8/site-packages/flask/helpers.py
new file mode 100644
index 0000000..15990d0
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/flask/helpers.py
@@ -0,0 +1,705 @@
+import os
+import pkgutil
+import socket
+import sys
+import typing as t
+from datetime import datetime
+from functools import lru_cache
+from functools import update_wrapper
+from threading import RLock
+
+import werkzeug.utils
+from werkzeug.exceptions import abort as _wz_abort
+from werkzeug.utils import redirect as _wz_redirect
+
+from .globals import _cv_request
+from .globals import current_app
+from .globals import request
+from .globals import request_ctx
+from .globals import session
+from .signals import message_flashed
+
+if t.TYPE_CHECKING:  # pragma: no cover
+    from werkzeug.wrappers import Response as BaseResponse
+    from .wrappers import Response
+    import typing_extensions as te
+
+
+def get_env() -> str:
+    """Get the environment the app is running in, indicated by the
+    :envvar:`FLASK_ENV` environment variable. The default is
+    ``'production'``.
+
+    .. deprecated:: 2.2
+        Will be removed in Flask 2.3.
+    """
+    import warnings
+
+    warnings.warn(
+        "'FLASK_ENV' and 'get_env' are deprecated and will be removed"
+        " in Flask 2.3. Use 'FLASK_DEBUG' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return os.environ.get("FLASK_ENV") or "production"
+
+
+def get_debug_flag() -> bool:
+    """Get whether debug mode should be enabled for the app, indicated by the
+    :envvar:`FLASK_DEBUG` environment variable. The default is ``False``.
+    """
+    val = os.environ.get("FLASK_DEBUG")
+
+    if not val:
+        env = os.environ.get("FLASK_ENV")
+
+        if env is not None:
+            print(
+                "'FLASK_ENV' is deprecated and will not be used in"
+                " Flask 2.3. Use 'FLASK_DEBUG' instead.",
+                file=sys.stderr,
+            )
+            return env == "development"
+
+        return False
+
+    return val.lower() not in {"0", "false", "no"}
+
+
+def get_load_dotenv(default: bool = True) -> bool:
+    """Get whether the user has disabled loading default dotenv files by
+    setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load
+    the files.
+
+    :param default: What to return if the env var isn't set.
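+
+    A brief, illustrative sketch of how the variable maps to the return
+    value (the environment settings shown are assumptions, not defaults
+    set by this module)::
+
+        # FLASK_SKIP_DOTENV unset -> returns the default argument (load)
+        # FLASK_SKIP_DOTENV=1     -> returns False (skip loading)
+        # FLASK_SKIP_DOTENV=0     -> returns True (load the files)
+        get_load_dotenv()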
+ """ + val = os.environ.get("FLASK_SKIP_DOTENV") + + if not val: + return default + + return val.lower() in ("0", "false", "no") + + +def stream_with_context( + generator_or_function: t.Union[ + t.Iterator[t.AnyStr], t.Callable[..., t.Iterator[t.AnyStr]] + ] +) -> t.Iterator[t.AnyStr]: + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. + + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) # type: ignore + except TypeError: + + def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: + gen = generator_or_function(*args, **kwargs) # type: ignore + return stream_with_context(gen) + + return update_wrapper(decorator, generator_or_function) # type: ignore + + def generator() -> t.Generator: + ctx = _cv_request.get(None) + if ctx is None: + raise RuntimeError( + "'stream_with_context' can only be used when a request" + " context is active, such as in a view function." + ) + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + yield from gen + finally: + if hasattr(gen, "close"): + gen.close() # type: ignore + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args: t.Any) -> "Response": + """Sometimes it is necessary to set additional headers in a view. Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. 
This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) # type: ignore + + +def url_for( + endpoint: str, + *, + _anchor: t.Optional[str] = None, + _method: t.Optional[str] = None, + _scheme: t.Optional[str] = None, + _external: t.Optional[bool] = None, + **values: t.Any, +) -> str: + """Generate a URL to the given endpoint with the given values. + + This requires an active request or application context, and calls + :meth:`current_app.url_for() `. See that method + for full documentation. + + :param endpoint: The endpoint name associated with the URL to + generate. If this starts with a ``.``, the current blueprint + name (if any) will be used. + :param _anchor: If given, append this as ``#anchor`` to the URL. + :param _method: If given, generate the URL associated with this + method for the endpoint. + :param _scheme: If given, the URL will have this scheme if it is + external. + :param _external: If given, prefer the URL to be internal (False) or + require it to be external (True). External URLs include the + scheme and domain. When not in an active request, URLs are + external by default. + :param values: Values to use for the variable parts of the URL rule. + Unknown keys are appended as query string arguments, like + ``?a=b&c=d``. + + .. versionchanged:: 2.2 + Calls ``current_app.url_for``, allowing an app to override the + behavior. + + .. versionchanged:: 0.10 + The ``_scheme`` parameter was added. + + .. versionchanged:: 0.9 + The ``_anchor`` and ``_method`` parameters were added. + + .. versionchanged:: 0.9 + Calls ``app.handle_url_build_error`` on build errors. + """ + return current_app.url_for( + endpoint, + _anchor=_anchor, + _method=_method, + _scheme=_scheme, + _external=_external, + **values, + ) + + +def redirect( + location: str, code: int = 302, Response: t.Optional[t.Type["BaseResponse"]] = None +) -> "BaseResponse": + """Create a redirect response object. + + If :data:`~flask.current_app` is available, it will use its + :meth:`~flask.Flask.redirect` method, otherwise it will use + :func:`werkzeug.utils.redirect`. + + :param location: The URL to redirect to. + :param code: The status code for the redirect. + :param Response: The response class to use. Not used when + ``current_app`` is active, which uses ``app.response_class``. + + .. versionadded:: 2.2 + Calls ``current_app.redirect`` if available instead of always + using Werkzeug's default ``redirect``. 
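+
+    A minimal usage sketch (the route and endpoint names are illustrative
+    and not part of this module)::
+
+        from flask import redirect, url_for
+
+        @app.route("/old-page")
+        def old_page():
+            return redirect(url_for("new_page"), code=301)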
+ """ + if current_app: + return current_app.redirect(location, code=code) + + return _wz_redirect(location, code=code, Response=Response) + + +def abort( # type: ignore[misc] + code: t.Union[int, "BaseResponse"], *args: t.Any, **kwargs: t.Any +) -> "te.NoReturn": + """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given + status code. + + If :data:`~flask.current_app` is available, it will call its + :attr:`~flask.Flask.aborter` object, otherwise it will use + :func:`werkzeug.exceptions.abort`. + + :param code: The status code for the exception, which must be + registered in ``app.aborter``. + :param args: Passed to the exception. + :param kwargs: Passed to the exception. + + .. versionadded:: 2.2 + Calls ``current_app.aborter`` if available instead of always + using Werkzeug's default ``abort``. + """ + if current_app: + current_app.aborter(code, *args, **kwargs) + + _wz_abort(code, *args, **kwargs) + + +def get_template_attribute(template_name: str, attribute: str) -> t.Any: + """Loads a macro (or variable) a template exports. This can be used to + invoke a macro from within Python code. If you for example have a + template named :file:`_cider.html` with the following contents: + + .. sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, attribute) + + +def flash(message: str, category: str = "message") -> None: + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. + :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # always in sync with the session object, which is not true for session + # implementations that use external storage for keeping their keys/values. + flashes = session.get("_flashes", []) + flashes.append((category, message)) + session["_flashes"] = flashes + message_flashed.send( + current_app._get_current_object(), # type: ignore + message=message, + category=category, + ) + + +def get_flashed_messages( + with_categories: bool = False, category_filter: t.Iterable[str] = () +) -> t.Union[t.List[str], t.List[t.Tuple[str, str]]]: + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. By default just the messages are returned, + but when `with_categories` is set to ``True``, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. 
The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (``True`` gives a tuple, where ``False`` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :doc:`/patterns/flashing` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to ``True`` to also receive categories. + :param category_filter: filter of categories to limit return values. Only + categories in the list will be returned. + """ + flashes = request_ctx.flashes + if flashes is None: + flashes = session.pop("_flashes") if "_flashes" in session else [] + request_ctx.flashes = flashes + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def _prepare_send_file_kwargs(**kwargs: t.Any) -> t.Dict[str, t.Any]: + if kwargs.get("max_age") is None: + kwargs["max_age"] = current_app.get_send_file_max_age + + kwargs.update( + environ=request.environ, + use_x_sendfile=current_app.config["USE_X_SENDFILE"], + response_class=current_app.response_class, + _root_path=current_app.root_path, # type: ignore + ) + return kwargs + + +def send_file( + path_or_file: t.Union[os.PathLike, str, t.BinaryIO], + mimetype: t.Optional[str] = None, + as_attachment: bool = False, + download_name: t.Optional[str] = None, + conditional: bool = True, + etag: t.Union[bool, str] = True, + last_modified: t.Optional[t.Union[datetime, int, float]] = None, + max_age: t.Optional[ + t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] + ] = None, +) -> "Response": + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. Use :func:`send_from_directory` to safely serve + user-requested paths from within a directory. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. Alternatively, + if the HTTP server supports ``X-Sendfile``, configuring Flask with + ``USE_X_SENDFILE = True`` will tell the server to send the given + path, which is much more efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. Make + sure the file pointer is seeked to the start of the data. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. + :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. 
+ :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. + + .. versionchanged:: 2.0 + ``download_name`` replaces the ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces the ``cache_timeout`` parameter. + ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces the ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + Passing a file-like object that inherits from + :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather + than sending an empty file. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionchanged:: 1.1 + ``filename`` may be a :class:`~os.PathLike` object. + + .. versionchanged:: 1.1 + Passing a :class:`~io.BytesIO` object supports range requests. + + .. versionchanged:: 1.0.3 + Filenames are encoded with ASCII instead of Latin-1 for broader + compatibility with WSGI servers. + + .. versionchanged:: 1.0 + UTF-8 filenames as specified in :rfc:`2231` are supported. + + .. versionchanged:: 0.12 + The filename is no longer automatically inferred from file + objects. If you want to use automatic MIME and etag support, + pass a filename via ``filename_or_fp`` or + ``attachment_filename``. + + .. versionchanged:: 0.12 + ``attachment_filename`` is preferred over ``filename`` for MIME + detection. + + .. versionchanged:: 0.9 + ``cache_timeout`` defaults to + :meth:`Flask.get_send_file_max_age`. + + .. versionchanged:: 0.7 + MIME guessing and etag support for file-like objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. + + .. versionchanged:: 0.5 + The ``add_etags``, ``cache_timeout`` and ``conditional`` + parameters were added. The default behavior is to add etags. + + .. versionadded:: 0.2 + """ + return werkzeug.utils.send_file( # type: ignore[return-value] + **_prepare_send_file_kwargs( + path_or_file=path_or_file, + environ=request.environ, + mimetype=mimetype, + as_attachment=as_attachment, + download_name=download_name, + conditional=conditional, + etag=etag, + last_modified=last_modified, + max_age=max_age, + ) + ) + + +def send_from_directory( + directory: t.Union[os.PathLike, str], + path: t.Union[os.PathLike, str], + **kwargs: t.Any, +) -> "Response": + """Send a file from within a directory using :func:`send_file`. + + .. code-block:: python + + @app.route("/uploads/") + def download_file(name): + return send_from_directory( + app.config['UPLOAD_FOLDER'], name, as_attachment=True + ) + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. 
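+
+    As an illustrative consequence of that check (the file names are
+    hypothetical), a path that escapes the directory is rejected instead
+    of being served::
+
+        send_from_directory("uploads", "../secrets.txt")  # raises NotFound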
+ + :param directory: The directory that ``path`` must be located under, + relative to the current application's root path. + :param path: The path to the file to send, relative to + ``directory``. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionchanged:: 2.0 + ``path`` replaces the ``filename`` parameter. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionadded:: 0.5 + """ + return werkzeug.utils.send_from_directory( # type: ignore[return-value] + directory, path, **_prepare_send_file_kwargs(**kwargs) + ) + + +def get_root_path(import_name: str) -> str: + """Find the root path of a package, or the path that contains a + module. If it cannot be found, returns the current working + directory. + + Not to be confused with the value returned by :func:`find_package`. + + :meta private: + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + + if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main + # module or a main module without path (interactive sessions), go + # with the current working directory. + if loader is None or import_name == "__main__": + return os.getcwd() + + if hasattr(loader, "get_filename"): + filepath = loader.get_filename(import_name) # type: ignore + else: + # Fall back to imports. + __import__(import_name) + mod = sys.modules[import_name] + filepath = getattr(mod, "__file__", None) + + # If we don't have a file path it might be because it is a + # namespace package. In this case pick the root path from the + # first module that is contained in the package. + if filepath is None: + raise RuntimeError( + "No root path can be found for the provided module" + f" {import_name!r}. This can happen because the module" + " came from an import hook that does not provide file" + " name information or because it's a namespace package." + " In this case the root path needs to be explicitly" + " provided." + ) + + # filepath is import_name.py for a module, or __init__.py for a package. + return os.path.dirname(os.path.abspath(filepath)) + + +class locked_cached_property(werkzeug.utils.cached_property): + """A :func:`property` that is only evaluated once. Like + :class:`werkzeug.utils.cached_property` except access uses a lock + for thread safety. + + .. versionchanged:: 2.0 + Inherits from Werkzeug's ``cached_property`` (and ``property``). + """ + + def __init__( + self, + fget: t.Callable[[t.Any], t.Any], + name: t.Optional[str] = None, + doc: t.Optional[str] = None, + ) -> None: + super().__init__(fget, name=name, doc=doc) + self.lock = RLock() + + def __get__(self, obj: object, type: type = None) -> t.Any: # type: ignore + if obj is None: + return self + + with self.lock: + return super().__get__(obj, type=type) + + def __set__(self, obj: object, value: t.Any) -> None: + with self.lock: + super().__set__(obj, value) + + def __delete__(self, obj: object) -> None: + with self.lock: + super().__delete__(obj) + + +def is_ip(value: str) -> bool: + """Determine if the given string is an IP address. 
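+
+    An illustrative sketch of the expected results (the addresses are
+    arbitrary examples)::
+
+        is_ip("127.0.0.1")    # True
+        is_ip("2001:db8::")   # True
+        is_ip("example.com")  # False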
+ + :param value: value to check + :type value: str + + :return: True if string is an IP address + :rtype: bool + """ + for family in (socket.AF_INET, socket.AF_INET6): + try: + socket.inet_pton(family, value) + except OSError: + pass + else: + return True + + return False + + +@lru_cache(maxsize=None) +def _split_blueprint_path(name: str) -> t.List[str]: + out: t.List[str] = [name] + + if "." in name: + out.extend(_split_blueprint_path(name.rpartition(".")[0])) + + return out diff --git a/venv/lib/python3.8/site-packages/flask/json/__init__.py b/venv/lib/python3.8/site-packages/flask/json/__init__.py new file mode 100644 index 0000000..65d8829 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/json/__init__.py @@ -0,0 +1,342 @@ +from __future__ import annotations + +import json as _json +import typing as t + +from jinja2.utils import htmlsafe_json_dumps as _jinja_htmlsafe_dumps + +from ..globals import current_app +from .provider import _default + +if t.TYPE_CHECKING: # pragma: no cover + from ..app import Flask + from ..wrappers import Response + + +class JSONEncoder(_json.JSONEncoder): + """The default JSON encoder. Handles extra types compared to the + built-in :class:`json.JSONEncoder`. + + - :class:`datetime.datetime` and :class:`datetime.date` are + serialized to :rfc:`822` strings. This is the same as the HTTP + date format. + - :class:`decimal.Decimal` is serialized to a string. + - :class:`uuid.UUID` is serialized to a string. + - :class:`dataclasses.dataclass` is passed to + :func:`dataclasses.asdict`. + - :class:`~markupsafe.Markup` (or any object with a ``__html__`` + method) will call the ``__html__`` method to get a string. + + Assign a subclass of this to :attr:`flask.Flask.json_encoder` or + :attr:`flask.Blueprint.json_encoder` to override the default. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.json`` instead. + """ + + def __init__(self, **kwargs) -> None: + import warnings + + warnings.warn( + "'JSONEncoder' is deprecated and will be removed in" + " Flask 2.3. Use 'Flask.json' to provide an alternate" + " JSON implementation instead.", + DeprecationWarning, + stacklevel=3, + ) + super().__init__(**kwargs) + + def default(self, o: t.Any) -> t.Any: + """Convert ``o`` to a JSON serializable type. See + :meth:`json.JSONEncoder.default`. Python does not support + overriding how basic types like ``str`` or ``list`` are + serialized, they are handled before this method. + """ + return _default(o) + + +class JSONDecoder(_json.JSONDecoder): + """The default JSON decoder. + + This does not change any behavior from the built-in + :class:`json.JSONDecoder`. + + Assign a subclass of this to :attr:`flask.Flask.json_decoder` or + :attr:`flask.Blueprint.json_decoder` to override the default. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.json`` instead. + """ + + def __init__(self, **kwargs) -> None: + import warnings + + warnings.warn( + "'JSONDecoder' is deprecated and will be removed in" + " Flask 2.3. Use 'Flask.json' to provide an alternate" + " JSON implementation instead.", + DeprecationWarning, + stacklevel=3, + ) + super().__init__(**kwargs) + + +def dumps(obj: t.Any, *, app: Flask | None = None, **kwargs: t.Any) -> str: + """Serialize data as JSON. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.dumps() ` + method, otherwise it will use :func:`json.dumps`. + + :param obj: The data to serialize. + :param kwargs: Arguments passed to the ``dumps`` implementation. + + .. 
versionchanged:: 2.2 + Calls ``current_app.json.dumps``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0.2 + :class:`decimal.Decimal` is supported by converting to a string. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.dumps' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + return app.json.dumps(obj, **kwargs) + + kwargs.setdefault("default", _default) + return _json.dumps(obj, **kwargs) + + +def dump( + obj: t.Any, fp: t.IO[str], *, app: Flask | None = None, **kwargs: t.Any +) -> None: + """Serialize data as JSON and write to a file. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.dump() ` + method, otherwise it will use :func:`json.dump`. + + :param obj: The data to serialize. + :param fp: A file opened for writing text. Should use the UTF-8 + encoding to be valid JSON. + :param kwargs: Arguments passed to the ``dump`` implementation. + + .. versionchanged:: 2.2 + Calls ``current_app.json.dump``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + Writing to a binary file, and the ``encoding`` argument, will be + removed in Flask 2.1. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.dump' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + app.json.dump(obj, fp, **kwargs) + else: + kwargs.setdefault("default", _default) + _json.dump(obj, fp, **kwargs) + + +def loads(s: str | bytes, *, app: Flask | None = None, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.loads() ` + method, otherwise it will use :func:`json.loads`. + + :param s: Text or UTF-8 bytes. + :param kwargs: Arguments passed to the ``loads`` implementation. + + .. versionchanged:: 2.2 + Calls ``current_app.json.loads``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. The data must be a + string or UTF-8 bytes. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.loads' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + return app.json.loads(s, **kwargs) + + return _json.loads(s, **kwargs) + + +def load(fp: t.IO[t.AnyStr], *, app: Flask | None = None, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON read from a file. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.load() ` + method, otherwise it will use :func:`json.load`. + + :param fp: A file opened for reading text or UTF-8 bytes. 
+ :param kwargs: Arguments passed to the ``load`` implementation. + + .. versionchanged:: 2.2 + Calls ``current_app.json.load``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. The file must be text + mode, or binary mode with UTF-8 bytes. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.load' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + return app.json.load(fp, **kwargs) + + return _json.load(fp, **kwargs) + + +def htmlsafe_dumps(obj: t.Any, **kwargs: t.Any) -> str: + """Serialize an object to a string of JSON with :func:`dumps`, then + replace HTML-unsafe characters with Unicode escapes and mark the + result safe with :class:`~markupsafe.Markup`. + + This is available in templates as the ``|tojson`` filter. + + The returned string is safe to render in HTML documents and + `` + + + +
    +""" + +FOOTER = """\ + +
    + +
    +
    +

    Console Locked

    +

    + The console is locked and needs to be unlocked by entering the PIN. + You can find the PIN printed out on the standard output of your + shell that runs the server. +

    +

    PIN: + + +

    +
    +
    + + +""" + +PAGE_HTML = ( + HEADER + + """\ +

    %(exception_type)s

    +
    +

    %(exception)s

    +
    +

    Traceback (most recent call last)

    +%(summary)s +
    +

    + This is the Copy/Paste friendly version of the traceback. +

    + +
    +
    + The debugger caught an exception in your WSGI application. You can now + look at the traceback which led to the error. + If you enable JavaScript you can also use additional features such as code + execution (if the evalex feature is enabled), automatic pasting of the + exceptions and much more. +
    +""" + + FOOTER + + """ + +""" +) + +CONSOLE_HTML = ( + HEADER + + """\ +

    Interactive Console

    +
    +In this console you can execute Python expressions in the context of the +application. The initial namespace was created by the debugger automatically. +
    +
    The Console requires JavaScript.
    +""" + + FOOTER +) + +SUMMARY_HTML = """\ +
    + %(title)s +
      %(frames)s
    + %(description)s +
    +""" + +FRAME_HTML = """\ +
    +

    File "%(filename)s", + line %(lineno)s, + in %(function_name)s

    +
    %(lines)s
    +
    +""" + + +def _process_traceback( + exc: BaseException, + te: t.Optional[traceback.TracebackException] = None, + *, + skip: int = 0, + hide: bool = True, +) -> traceback.TracebackException: + if te is None: + te = traceback.TracebackException.from_exception(exc, lookup_lines=False) + + # Get the frames the same way StackSummary.extract did, in order + # to match each frame with the FrameSummary to augment. + frame_gen = traceback.walk_tb(exc.__traceback__) + limit = getattr(sys, "tracebacklimit", None) + + if limit is not None: + if limit < 0: + limit = 0 + + frame_gen = itertools.islice(frame_gen, limit) + + if skip: + frame_gen = itertools.islice(frame_gen, skip, None) + del te.stack[:skip] + + new_stack: t.List[DebugFrameSummary] = [] + hidden = False + + # Match each frame with the FrameSummary that was generated. + # Hide frames using Paste's __traceback_hide__ rules. Replace + # all visible FrameSummary with DebugFrameSummary. + for (f, _), fs in zip(frame_gen, te.stack): + if hide: + hide_value = f.f_locals.get("__traceback_hide__", False) + + if hide_value in {"before", "before_and_this"}: + new_stack = [] + hidden = False + + if hide_value == "before_and_this": + continue + elif hide_value in {"reset", "reset_and_this"}: + hidden = False + + if hide_value == "reset_and_this": + continue + elif hide_value in {"after", "after_and_this"}: + hidden = True + + if hide_value == "after_and_this": + continue + elif hide_value or hidden: + continue + + frame_args: t.Dict[str, t.Any] = { + "filename": fs.filename, + "lineno": fs.lineno, + "name": fs.name, + "locals": f.f_locals, + "globals": f.f_globals, + } + + if hasattr(fs, "colno"): + frame_args["colno"] = fs.colno # type: ignore[attr-defined] + frame_args["end_colno"] = fs.end_colno # type: ignore[attr-defined] + + new_stack.append(DebugFrameSummary(**frame_args)) + + # The codeop module is used to compile code from the interactive + # debugger. Hide any codeop frames from the bottom of the traceback. 
+ while new_stack: + module = new_stack[0].global_ns.get("__name__") + + if module is None: + module = new_stack[0].local_ns.get("__name__") + + if module == "codeop": + del new_stack[0] + else: + break + + te.stack[:] = new_stack + + if te.__context__: + context_exc = t.cast(BaseException, exc.__context__) + te.__context__ = _process_traceback(context_exc, te.__context__, hide=hide) + + if te.__cause__: + cause_exc = t.cast(BaseException, exc.__cause__) + te.__cause__ = _process_traceback(cause_exc, te.__cause__, hide=hide) + + return te + + +class DebugTraceback: + __slots__ = ("_te", "_cache_all_tracebacks", "_cache_all_frames") + + def __init__( + self, + exc: BaseException, + te: t.Optional[traceback.TracebackException] = None, + *, + skip: int = 0, + hide: bool = True, + ) -> None: + self._te = _process_traceback(exc, te, skip=skip, hide=hide) + + def __str__(self) -> str: + return f"<{type(self).__name__} {self._te}>" + + @cached_property + def all_tracebacks( + self, + ) -> t.List[t.Tuple[t.Optional[str], traceback.TracebackException]]: + out = [] + current = self._te + + while current is not None: + if current.__cause__ is not None: + chained_msg = ( + "The above exception was the direct cause of the" + " following exception" + ) + chained_exc = current.__cause__ + elif current.__context__ is not None and not current.__suppress_context__: + chained_msg = ( + "During handling of the above exception, another" + " exception occurred" + ) + chained_exc = current.__context__ + else: + chained_msg = None + chained_exc = None + + out.append((chained_msg, current)) + current = chained_exc + + return out + + @cached_property + def all_frames(self) -> t.List["DebugFrameSummary"]: + return [ + f for _, te in self.all_tracebacks for f in te.stack # type: ignore[misc] + ] + + def render_traceback_text(self) -> str: + return "".join(self._te.format()) + + def render_traceback_html(self, include_title: bool = True) -> str: + library_frames = [f.is_library for f in self.all_frames] + mark_library = 0 < sum(library_frames) < len(library_frames) + rows = [] + + if not library_frames: + classes = "traceback noframe-traceback" + else: + classes = "traceback" + + for msg, current in reversed(self.all_tracebacks): + row_parts = [] + + if msg is not None: + row_parts.append(f'
<li><div class="exc-divider">{msg}:</div>')
+
+            for frame in current.stack:
+                frame = t.cast(DebugFrameSummary, frame)
+                info = f' title="{escape(frame.info)}"' if frame.info else ""
+                row_parts.append(f"<li{info}>{frame.render_html(mark_library)}")
+
+            rows.append("\n".join(row_parts))
+
+        is_syntax_error = issubclass(self._te.exc_type, SyntaxError)
+
+        if include_title:
+            if is_syntax_error:
+                title = "Syntax Error"
+            else:
+                title = "Traceback <em>(most recent call last)</em>:"
+        else:
+            title = ""
+
+        exc_full = escape("".join(self._te.format_exception_only()))
+
+        if is_syntax_error:
+            description = f"<pre class=syntaxerror>{exc_full}</pre>"
+        else:
+            description = f"<blockquote>{exc_full}</blockquote>"
+
+        return SUMMARY_HTML % {
+            "classes": classes,
+            "title": f"<h3>{title}</h3>
    ", + "frames": "\n".join(rows), + "description": description, + } + + def render_debugger_html( + self, evalex: bool, secret: str, evalex_trusted: bool + ) -> str: + exc_lines = list(self._te.format_exception_only()) + plaintext = "".join(self._te.format()) + return PAGE_HTML % { + "evalex": "true" if evalex else "false", + "evalex_trusted": "true" if evalex_trusted else "false", + "console": "false", + "title": exc_lines[0], + "exception": escape("".join(exc_lines)), + "exception_type": escape(self._te.exc_type.__name__), + "summary": self.render_traceback_html(include_title=False), + "plaintext": escape(plaintext), + "plaintext_cs": re.sub("-{2,}", "-", plaintext), + "secret": secret, + } + + +class DebugFrameSummary(traceback.FrameSummary): + """A :class:`traceback.FrameSummary` that can evaluate code in the + frame's namespace. + """ + + __slots__ = ( + "local_ns", + "global_ns", + "_cache_info", + "_cache_is_library", + "_cache_console", + ) + + def __init__( + self, + *, + locals: t.Dict[str, t.Any], + globals: t.Dict[str, t.Any], + **kwargs: t.Any, + ) -> None: + super().__init__(locals=None, **kwargs) + self.local_ns = locals + self.global_ns = globals + + @cached_property + def info(self) -> t.Optional[str]: + return self.local_ns.get("__traceback_info__") + + @cached_property + def is_library(self) -> bool: + return any( + self.filename.startswith((path, os.path.realpath(path))) + for path in sysconfig.get_paths().values() + ) + + @cached_property + def console(self) -> Console: + return Console(self.global_ns, self.local_ns) + + def eval(self, code: str) -> t.Any: + return self.console.eval(code) + + def render_html(self, mark_library: bool) -> str: + context = 5 + lines = linecache.getlines(self.filename) + line_idx = self.lineno - 1 # type: ignore[operator] + start_idx = max(0, line_idx - context) + stop_idx = min(len(lines), line_idx + context + 1) + rendered_lines = [] + + def render_line(line: str, cls: str) -> None: + line = line.expandtabs().rstrip() + stripped_line = line.strip() + prefix = len(line) - len(stripped_line) + colno = getattr(self, "colno", 0) + end_colno = getattr(self, "end_colno", 0) + + if cls == "current" and colno and end_colno: + arrow = ( + f'\n{" " * prefix}' + f'{" " * (colno - prefix)}{"^" * (end_colno - colno)}' + ) + else: + arrow = "" + + rendered_lines.append( + f'
    {" " * prefix}'
    +                f"{escape(stripped_line) if stripped_line else ' '}"
    +                f"{arrow if arrow else ''}
    " + ) + + if lines: + for line in lines[start_idx:line_idx]: + render_line(line, "before") + + render_line(lines[line_idx], "current") + + for line in lines[line_idx + 1 : stop_idx]: + render_line(line, "after") + + return FRAME_HTML % { + "id": id(self), + "filename": escape(self.filename), + "lineno": self.lineno, + "function_name": escape(self.name), + "lines": "\n".join(rendered_lines), + "library": "library" if mark_library and self.is_library else "", + } + + +def render_console_html(secret: str, evalex_trusted: bool) -> str: + return CONSOLE_HTML % { + "evalex": "true", + "evalex_trusted": "true" if evalex_trusted else "false", + "console": "true", + "title": "Console", + "secret": secret, + } diff --git a/venv/lib/python3.8/site-packages/werkzeug/exceptions.py b/venv/lib/python3.8/site-packages/werkzeug/exceptions.py new file mode 100644 index 0000000..013df72 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/exceptions.py @@ -0,0 +1,884 @@ +"""Implements a number of Python exceptions which can be raised from within +a view to trigger a standard HTTP non-200 response. + +Usage Example +------------- + +.. code-block:: python + + from werkzeug.wrappers.request import Request + from werkzeug.exceptions import HTTPException, NotFound + + def view(request): + raise NotFound() + + @Request.application + def application(request): + try: + return view(request) + except HTTPException as e: + return e + +As you can see from this example those exceptions are callable WSGI +applications. However, they are not Werkzeug response objects. You +can get a response object by calling ``get_response()`` on a HTTP +exception. + +Keep in mind that you may have to pass an environ (WSGI) or scope +(ASGI) to ``get_response()`` because some errors fetch additional +information relating to the request. + +If you want to hook in a different exception page to say, a 404 status +code, you can add a second except for a specific subclass of an error: + +.. code-block:: python + + @Request.application + def application(request): + try: + return view(request) + except NotFound as e: + return not_found(request) + except HTTPException as e: + return e + +""" +import typing as t +from datetime import datetime + +from markupsafe import escape +from markupsafe import Markup + +from ._internal import _get_environ + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIEnvironment + from .datastructures import WWWAuthenticate + from .sansio.response import Response + from .wrappers.request import Request as WSGIRequest # noqa: F401 + from .wrappers.response import Response as WSGIResponse # noqa: F401 + + +class HTTPException(Exception): + """The base class for all HTTP exceptions. This exception can be called as a WSGI + application to render a default error page or you can catch the subclasses + of it independently and render nicer error messages. + + .. versionchanged:: 2.1 + Removed the ``wrap`` class method. 
+ """ + + code: t.Optional[int] = None + description: t.Optional[str] = None + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + ) -> None: + super().__init__() + if description is not None: + self.description = description + self.response = response + + @property + def name(self) -> str: + """The status name.""" + from .http import HTTP_STATUS_CODES + + return HTTP_STATUS_CODES.get(self.code, "Unknown Error") # type: ignore + + def get_description( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> str: + """Get the description.""" + if self.description is None: + description = "" + elif not isinstance(self.description, str): + description = str(self.description) + else: + description = self.description + + description = escape(description).replace("\n", Markup("
    ")) + return f"

    {description}

    " + + def get_body( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> str: + """Get the HTML body.""" + return ( + "\n" + "\n" + f"{self.code} {escape(self.name)}\n" + f"

    {escape(self.name)}

    \n" + f"{self.get_description(environ)}\n" + ) + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + """Get a list of headers.""" + return [("Content-Type", "text/html; charset=utf-8")] + + def get_response( + self, + environ: t.Optional[t.Union["WSGIEnvironment", "WSGIRequest"]] = None, + scope: t.Optional[dict] = None, + ) -> "Response": + """Get a response object. If one was passed to the exception + it's returned directly. + + :param environ: the optional environ for the request. This + can be used to modify the response depending + on how the request looked like. + :return: a :class:`Response` object or a subclass thereof. + """ + from .wrappers.response import Response as WSGIResponse # noqa: F811 + + if self.response is not None: + return self.response + if environ is not None: + environ = _get_environ(environ) + headers = self.get_headers(environ, scope) + return WSGIResponse(self.get_body(environ, scope), self.code, headers) + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + """Call the exception as WSGI application. + + :param environ: the WSGI environment. + :param start_response: the response callable provided by the WSGI + server. + """ + response = t.cast("WSGIResponse", self.get_response(environ)) + return response(environ, start_response) + + def __str__(self) -> str: + code = self.code if self.code is not None else "???" + return f"{code} {self.name}: {self.description}" + + def __repr__(self) -> str: + code = self.code if self.code is not None else "???" + return f"<{type(self).__name__} '{code}: {self.name}'>" + + +class BadRequest(HTTPException): + """*400* `Bad Request` + + Raise if the browser sends something to the application the application + or server cannot handle. + """ + + code = 400 + description = ( + "The browser (or proxy) sent a request that this server could " + "not understand." + ) + + +class BadRequestKeyError(BadRequest, KeyError): + """An exception that is used to signal both a :exc:`KeyError` and a + :exc:`BadRequest`. Used by many of the datastructures. + """ + + _description = BadRequest.description + #: Show the KeyError along with the HTTP error message in the + #: response. This should be disabled in production, but can be + #: useful in a debug mode. + show_exception = False + + def __init__(self, arg: t.Optional[str] = None, *args: t.Any, **kwargs: t.Any): + super().__init__(*args, **kwargs) + + if arg is None: + KeyError.__init__(self) + else: + KeyError.__init__(self, arg) + + @property # type: ignore + def description(self) -> str: # type: ignore + if self.show_exception: + return ( + f"{self._description}\n" + f"{KeyError.__name__}: {KeyError.__str__(self)}" + ) + + return self._description + + @description.setter + def description(self, value: str) -> None: + self._description = value + + +class ClientDisconnected(BadRequest): + """Internal exception that is raised if Werkzeug detects a disconnected + client. Since the client is already gone at that point attempting to + send the error message to the client might not work and might ultimately + result in another exception in the server. Mainly this is here so that + it is silenced by default as far as Werkzeug is concerned. + + Since disconnections cannot be reliably detected and are unspecified + by WSGI to a large extent this might or might not be raised if a client + is gone. + + .. 
versionadded:: 0.8 + """ + + +class SecurityError(BadRequest): + """Raised if something triggers a security error. This is otherwise + exactly like a bad request error. + + .. versionadded:: 0.9 + """ + + +class BadHost(BadRequest): + """Raised if the submitted host is badly formatted. + + .. versionadded:: 0.11.2 + """ + + +class Unauthorized(HTTPException): + """*401* ``Unauthorized`` + + Raise if the user is not authorized to access a resource. + + The ``www_authenticate`` argument should be used to set the + ``WWW-Authenticate`` header. This is used for HTTP basic auth and + other schemes. Use :class:`~werkzeug.datastructures.WWWAuthenticate` + to create correctly formatted values. Strictly speaking a 401 + response is invalid if it doesn't provide at least one value for + this header, although real clients typically don't care. + + :param description: Override the default message used for the body + of the response. + :param www-authenticate: A single value, or list of values, for the + WWW-Authenticate header(s). + + .. versionchanged:: 2.0 + Serialize multiple ``www_authenticate`` items into multiple + ``WWW-Authenticate`` headers, rather than joining them + into a single value, for better interoperability. + + .. versionchanged:: 0.15.3 + If the ``www_authenticate`` argument is not set, the + ``WWW-Authenticate`` header is not set. + + .. versionchanged:: 0.15.3 + The ``response`` argument was restored. + + .. versionchanged:: 0.15.1 + ``description`` was moved back as the first argument, restoring + its previous position. + + .. versionchanged:: 0.15.0 + ``www_authenticate`` was added as the first argument, ahead of + ``description``. + """ + + code = 401 + description = ( + "The server could not verify that you are authorized to access" + " the URL requested. You either supplied the wrong credentials" + " (e.g. a bad password), or your browser doesn't understand" + " how to supply the credentials required." + ) + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + www_authenticate: t.Optional[ + t.Union["WWWAuthenticate", t.Iterable["WWWAuthenticate"]] + ] = None, + ) -> None: + super().__init__(description, response) + + from .datastructures import WWWAuthenticate + + if isinstance(www_authenticate, WWWAuthenticate): + www_authenticate = (www_authenticate,) + + self.www_authenticate = www_authenticate + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.www_authenticate: + headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate) + return headers + + +class Forbidden(HTTPException): + """*403* `Forbidden` + + Raise if the user doesn't have the permission for the requested resource + but was authenticated. + """ + + code = 403 + description = ( + "You don't have the permission to access the requested" + " resource. It is either read-protected or not readable by the" + " server." + ) + + +class NotFound(HTTPException): + """*404* `Not Found` + + Raise if a resource does not exist and never existed. + """ + + code = 404 + description = ( + "The requested URL was not found on the server. If you entered" + " the URL manually please check your spelling and try again." + ) + + +class MethodNotAllowed(HTTPException): + """*405* `Method Not Allowed` + + Raise if the server used a method the resource does not handle. For + example `POST` if the resource is view only. 
Especially useful for REST. + + The first argument for this exception should be a list of allowed methods. + Strictly speaking the response would be invalid if you don't provide valid + methods in the header which you can do with that list. + """ + + code = 405 + description = "The method is not allowed for the requested URL." + + def __init__( + self, + valid_methods: t.Optional[t.Iterable[str]] = None, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + ) -> None: + """Takes an optional list of valid http methods + starting with werkzeug 0.3 the list will be mandatory.""" + super().__init__(description=description, response=response) + self.valid_methods = valid_methods + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.valid_methods: + headers.append(("Allow", ", ".join(self.valid_methods))) + return headers + + +class NotAcceptable(HTTPException): + """*406* `Not Acceptable` + + Raise if the server can't return any content conforming to the + `Accept` headers of the client. + """ + + code = 406 + description = ( + "The resource identified by the request is only capable of" + " generating response entities which have content" + " characteristics not acceptable according to the accept" + " headers sent in the request." + ) + + +class RequestTimeout(HTTPException): + """*408* `Request Timeout` + + Raise to signalize a timeout. + """ + + code = 408 + description = ( + "The server closed the network connection because the browser" + " didn't finish the request within the specified time." + ) + + +class Conflict(HTTPException): + """*409* `Conflict` + + Raise to signal that a request cannot be completed because it conflicts + with the current state on the server. + + .. versionadded:: 0.7 + """ + + code = 409 + description = ( + "A conflict happened while processing the request. The" + " resource might have been modified while the request was being" + " processed." + ) + + +class Gone(HTTPException): + """*410* `Gone` + + Raise if a resource existed previously and went away without new location. + """ + + code = 410 + description = ( + "The requested URL is no longer available on this server and" + " there is no forwarding address. If you followed a link from a" + " foreign page, please contact the author of this page." + ) + + +class LengthRequired(HTTPException): + """*411* `Length Required` + + Raise if the browser submitted data but no ``Content-Length`` header which + is required for the kind of processing the server does. + """ + + code = 411 + description = ( + "A request with this method requires a valid Content-" + "Length header." + ) + + +class PreconditionFailed(HTTPException): + """*412* `Precondition Failed` + + Status code used in combination with ``If-Match``, ``If-None-Match``, or + ``If-Unmodified-Since``. + """ + + code = 412 + description = ( + "The precondition on the request for the URL failed positive evaluation." + ) + + +class RequestEntityTooLarge(HTTPException): + """*413* `Request Entity Too Large` + + The status code one should return if the data submitted exceeded a given + limit. + """ + + code = 413 + description = "The data value transmitted exceeds the capacity limit." + + +class RequestURITooLarge(HTTPException): + """*414* `Request URI Too Large` + + Like *413* but for too long URLs. 
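A minimal sketch of how these header-carrying exceptions might be raised from application code; the ``dispatch`` view and the credential check are hypothetical, only the werkzeug classes documented above are real:

.. code-block:: python

    from werkzeug.datastructures import WWWAuthenticate
    from werkzeug.exceptions import MethodNotAllowed, Unauthorized

    def dispatch(request):
        if request.method not in ("GET", "HEAD"):
            # get_headers() will emit "Allow: GET, HEAD" from valid_methods.
            raise MethodNotAllowed(valid_methods=["GET", "HEAD"])
        if request.authorization is None:
            # get_headers() serializes each WWWAuthenticate value into a
            # WWW-Authenticate header on the 401 response.
            challenge = WWWAuthenticate("basic", {"realm": "example"})
            raise Unauthorized(www_authenticate=challenge)
        ...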
+ """ + + code = 414 + description = ( + "The length of the requested URL exceeds the capacity limit for" + " this server. The request cannot be processed." + ) + + +class UnsupportedMediaType(HTTPException): + """*415* `Unsupported Media Type` + + The status code returned if the server is unable to handle the media type + the client transmitted. + """ + + code = 415 + description = ( + "The server does not support the media type transmitted in the request." + ) + + +class RequestedRangeNotSatisfiable(HTTPException): + """*416* `Requested Range Not Satisfiable` + + The client asked for an invalid part of the file. + + .. versionadded:: 0.7 + """ + + code = 416 + description = "The server cannot provide the requested range." + + def __init__( + self, + length: t.Optional[int] = None, + units: str = "bytes", + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + ) -> None: + """Takes an optional `Content-Range` header value based on ``length`` + parameter. + """ + super().__init__(description=description, response=response) + self.length = length + self.units = units + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.length is not None: + headers.append(("Content-Range", f"{self.units} */{self.length}")) + return headers + + +class ExpectationFailed(HTTPException): + """*417* `Expectation Failed` + + The server cannot meet the requirements of the Expect request-header. + + .. versionadded:: 0.7 + """ + + code = 417 + description = "The server could not meet the requirements of the Expect header" + + +class ImATeapot(HTTPException): + """*418* `I'm a teapot` + + The server should return this if it is a teapot and someone attempted + to brew coffee with it. + + .. versionadded:: 0.7 + """ + + code = 418 + description = "This server is a teapot, not a coffee machine" + + +class UnprocessableEntity(HTTPException): + """*422* `Unprocessable Entity` + + Used if the request is well formed, but the instructions are otherwise + incorrect. + """ + + code = 422 + description = ( + "The request was well-formed but was unable to be followed due" + " to semantic errors." + ) + + +class Locked(HTTPException): + """*423* `Locked` + + Used if the resource that is being accessed is locked. + """ + + code = 423 + description = "The resource that is being accessed is locked." + + +class FailedDependency(HTTPException): + """*424* `Failed Dependency` + + Used if the method could not be performed on the resource + because the requested action depended on another action and that action failed. + """ + + code = 424 + description = ( + "The method could not be performed on the resource because the" + " requested action depended on another action and that action" + " failed." + ) + + +class PreconditionRequired(HTTPException): + """*428* `Precondition Required` + + The server requires this request to be conditional, typically to prevent + the lost update problem, which is a race condition between two or more + clients attempting to update a resource through PUT or DELETE. By requiring + each client to include a conditional header ("If-Match" or "If-Unmodified- + Since") with the proper value retained from a recent GET request, the + server ensures that each client has at least seen the previous revision of + the resource. 
+ """ + + code = 428 + description = ( + "This request is required to be conditional; try using" + ' "If-Match" or "If-Unmodified-Since".' + ) + + +class _RetryAfter(HTTPException): + """Adds an optional ``retry_after`` parameter which will set the + ``Retry-After`` header. May be an :class:`int` number of seconds or + a :class:`~datetime.datetime`. + """ + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + retry_after: t.Optional[t.Union[datetime, int]] = None, + ) -> None: + super().__init__(description, response) + self.retry_after = retry_after + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + + if self.retry_after: + if isinstance(self.retry_after, datetime): + from .http import http_date + + value = http_date(self.retry_after) + else: + value = str(self.retry_after) + + headers.append(("Retry-After", value)) + + return headers + + +class TooManyRequests(_RetryAfter): + """*429* `Too Many Requests` + + The server is limiting the rate at which this user receives + responses, and this request exceeds that rate. (The server may use + any convenient method to identify users and their request rates). + The server may include a "Retry-After" header to indicate how long + the user should wait before retrying. + + :param retry_after: If given, set the ``Retry-After`` header to this + value. May be an :class:`int` number of seconds or a + :class:`~datetime.datetime`. + + .. versionchanged:: 1.0 + Added ``retry_after`` parameter. + """ + + code = 429 + description = "This user has exceeded an allotted request count. Try again later." + + +class RequestHeaderFieldsTooLarge(HTTPException): + """*431* `Request Header Fields Too Large` + + The server refuses to process the request because the header fields are too + large. One or more individual fields may be too large, or the set of all + headers is too large. + """ + + code = 431 + description = "One or more header fields exceeds the maximum size." + + +class UnavailableForLegalReasons(HTTPException): + """*451* `Unavailable For Legal Reasons` + + This status code indicates that the server is denying access to the + resource as a consequence of a legal demand. + """ + + code = 451 + description = "Unavailable for legal reasons." + + +class InternalServerError(HTTPException): + """*500* `Internal Server Error` + + Raise if an internal server error occurred. This is a good fallback if an + unknown error occurred in the dispatcher. + + .. versionchanged:: 1.0.0 + Added the :attr:`original_exception` attribute. + """ + + code = 500 + description = ( + "The server encountered an internal error and was unable to" + " complete your request. Either the server is overloaded or" + " there is an error in the application." + ) + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + original_exception: t.Optional[BaseException] = None, + ) -> None: + #: The original exception that caused this 500 error. Can be + #: used by frameworks to provide context when handling + #: unexpected errors. + self.original_exception = original_exception + super().__init__(description=description, response=response) + + +class NotImplemented(HTTPException): + """*501* `Not Implemented` + + Raise if the application does not support the action requested by the + browser. 
+ """ + + code = 501 + description = "The server does not support the action requested by the browser." + + +class BadGateway(HTTPException): + """*502* `Bad Gateway` + + If you do proxying in your application you should return this status code + if you received an invalid response from the upstream server it accessed + in attempting to fulfill the request. + """ + + code = 502 + description = ( + "The proxy server received an invalid response from an upstream server." + ) + + +class ServiceUnavailable(_RetryAfter): + """*503* `Service Unavailable` + + Status code you should return if a service is temporarily + unavailable. + + :param retry_after: If given, set the ``Retry-After`` header to this + value. May be an :class:`int` number of seconds or a + :class:`~datetime.datetime`. + + .. versionchanged:: 1.0 + Added ``retry_after`` parameter. + """ + + code = 503 + description = ( + "The server is temporarily unable to service your request due" + " to maintenance downtime or capacity problems. Please try" + " again later." + ) + + +class GatewayTimeout(HTTPException): + """*504* `Gateway Timeout` + + Status code you should return if a connection to an upstream server + times out. + """ + + code = 504 + description = "The connection to an upstream server timed out." + + +class HTTPVersionNotSupported(HTTPException): + """*505* `HTTP Version Not Supported` + + The server does not support the HTTP protocol version used in the request. + """ + + code = 505 + description = ( + "The server does not support the HTTP protocol version used in the request." + ) + + +default_exceptions: t.Dict[int, t.Type[HTTPException]] = {} + + +def _find_exceptions() -> None: + for obj in globals().values(): + try: + is_http_exception = issubclass(obj, HTTPException) + except TypeError: + is_http_exception = False + if not is_http_exception or obj.code is None: + continue + old_obj = default_exceptions.get(obj.code, None) + if old_obj is not None and issubclass(obj, old_obj): + continue + default_exceptions[obj.code] = obj + + +_find_exceptions() +del _find_exceptions + + +class Aborter: + """When passed a dict of code -> exception items it can be used as + callable that raises exceptions. If the first argument to the + callable is an integer it will be looked up in the mapping, if it's + a WSGI application it will be raised in a proxy exception. + + The rest of the arguments are forwarded to the exception constructor. + """ + + def __init__( + self, + mapping: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None, + extra: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None, + ) -> None: + if mapping is None: + mapping = default_exceptions + self.mapping = dict(mapping) + if extra is not None: + self.mapping.update(extra) + + def __call__( + self, code: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any + ) -> "te.NoReturn": + from .sansio.response import Response + + if isinstance(code, Response): + raise HTTPException(response=code) + + if code not in self.mapping: + raise LookupError(f"no exception for {code!r}") + + raise self.mapping[code](*args, **kwargs) + + +def abort( + status: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any +) -> "te.NoReturn": + """Raises an :py:exc:`HTTPException` for the given status code or WSGI + application. + + If a status code is given, it will be looked up in the list of + exceptions and will raise that exception. 
If passed a WSGI application, + it will wrap it in a proxy WSGI exception and raise that:: + + abort(404) # 404 Not Found + abort(Response('Hello World')) + + """ + _aborter(status, *args, **kwargs) + + +_aborter: Aborter = Aborter() diff --git a/venv/lib/python3.8/site-packages/werkzeug/formparser.py b/venv/lib/python3.8/site-packages/werkzeug/formparser.py new file mode 120000 index 0000000..d6ef511 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/formparser.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/ac/b1/2e/fd9c15a6faac85983a138422bf7e90b26ce60b2b538a305e197ddd0c69 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/http.py b/venv/lib/python3.8/site-packages/werkzeug/http.py new file mode 100644 index 0000000..9777685 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/http.py @@ -0,0 +1,1311 @@ +import base64 +import email.utils +import re +import typing +import typing as t +import warnings +from datetime import date +from datetime import datetime +from datetime import time +from datetime import timedelta +from datetime import timezone +from enum import Enum +from hashlib import sha1 +from time import mktime +from time import struct_time +from urllib.parse import unquote_to_bytes as _unquote +from urllib.request import parse_http_list as _parse_list_header + +from ._internal import _cookie_quote +from ._internal import _dt_as_utc +from ._internal import _make_cookie_domain +from ._internal import _to_bytes +from ._internal import _to_str +from ._internal import _wsgi_decoding_dance + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + +# for explanation of "media-range", etc. see Sections 5.3.{1,2} of RFC 7231 +_accept_re = re.compile( + r""" + ( # media-range capturing-parenthesis + [^\s;,]+ # type/subtype + (?:[ \t]*;[ \t]* # ";" + (?: # parameter non-capturing-parenthesis + [^\s;,q][^\s;,]* # token that doesn't start with "q" + | # or + q[^\s;,=][^\s;,]* # token that is more than just "q" + ) + )* # zero or more parameters + ) # end of media-range + (?:[ \t]*;[ \t]*q= # weight is a "q" parameter + (\d*(?:\.\d+)?) # qvalue capturing-parentheses + [^,]* # "extension" accept params: who cares? + )? # accept params are optional + """, + re.VERBOSE, +) +_token_chars = frozenset( + "!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz|~" +) +_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') +_option_header_piece_re = re.compile( + r""" + ;\s*,?\s* # newlines were replaced with commas + (?P + "[^"\\]*(?:\\.[^"\\]*)*" # quoted string + | + [^\s;,=*]+ # token + ) + (?:\*(?P\d+))? # *1, optional continuation index + \s* + (?: # optionally followed by =value + (?: # equals sign, possibly with encoding + \*\s*=\s* # * indicates extended notation + (?: # optional encoding + (?P[^\s]+?) + '(?P[^\s]*?)' + )? + | + =\s* # basic notation + ) + (?P + "[^"\\]*(?:\\.[^"\\]*)*" # quoted string + | + [^;,]+ # token + )? + )? 
+ \s* + """, + flags=re.VERBOSE, +) +_option_header_start_mime_type = re.compile(r",\s*([^;,\s]+)([;,]\s*.+)?") +_entity_headers = frozenset( + [ + "allow", + "content-encoding", + "content-language", + "content-length", + "content-location", + "content-md5", + "content-range", + "content-type", + "expires", + "last-modified", + ] +) +_hop_by_hop_headers = frozenset( + [ + "connection", + "keep-alive", + "proxy-authenticate", + "proxy-authorization", + "te", + "trailer", + "transfer-encoding", + "upgrade", + ] +) +HTTP_STATUS_CODES = { + 100: "Continue", + 101: "Switching Protocols", + 102: "Processing", + 103: "Early Hints", # see RFC 8297 + 200: "OK", + 201: "Created", + 202: "Accepted", + 203: "Non Authoritative Information", + 204: "No Content", + 205: "Reset Content", + 206: "Partial Content", + 207: "Multi Status", + 208: "Already Reported", # see RFC 5842 + 226: "IM Used", # see RFC 3229 + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modified", + 305: "Use Proxy", + 306: "Switch Proxy", # unused + 307: "Temporary Redirect", + 308: "Permanent Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", # unused + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Request Entity Too Large", + 414: "Request URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 418: "I'm a teapot", # see RFC 2324 + 421: "Misdirected Request", # see RFC 7540 + 422: "Unprocessable Entity", + 423: "Locked", + 424: "Failed Dependency", + 425: "Too Early", # see RFC 8470 + 426: "Upgrade Required", + 428: "Precondition Required", # see RFC 6585 + 429: "Too Many Requests", + 431: "Request Header Fields Too Large", + 449: "Retry With", # proprietary MS extension + 451: "Unavailable For Legal Reasons", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", + 505: "HTTP Version Not Supported", + 506: "Variant Also Negotiates", # see RFC 2295 + 507: "Insufficient Storage", + 508: "Loop Detected", # see RFC 5842 + 510: "Not Extended", + 511: "Network Authentication Failed", +} + + +class COEP(Enum): + """Cross Origin Embedder Policies""" + + UNSAFE_NONE = "unsafe-none" + REQUIRE_CORP = "require-corp" + + +class COOP(Enum): + """Cross Origin Opener Policies""" + + UNSAFE_NONE = "unsafe-none" + SAME_ORIGIN_ALLOW_POPUPS = "same-origin-allow-popups" + SAME_ORIGIN = "same-origin" + + +def quote_header_value( + value: t.Union[str, int], extra_chars: str = "", allow_token: bool = True +) -> str: + """Quote a header value if necessary. + + .. versionadded:: 0.5 + + :param value: the value to quote. + :param extra_chars: a list of extra characters to skip quoting. + :param allow_token: if this is enabled token values are returned + unchanged. + """ + if isinstance(value, bytes): + value = value.decode("latin1") + value = str(value) + if allow_token: + token_chars = _token_chars | set(extra_chars) + if set(value).issubset(token_chars): + return value + value = value.replace("\\", "\\\\").replace('"', '\\"') + return f'"{value}"' + + +def unquote_header_value(value: str, is_filename: bool = False) -> str: + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). 
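For illustration, plain tokens pass through ``quote_header_value`` unchanged while values containing non-token characters are quoted:

.. code-block:: python

    from werkzeug.http import quote_header_value

    quote_header_value("report.pdf")         # 'report.pdf' (token, unchanged)
    quote_header_value("annual report.pdf")  # '"annual report.pdf"'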
+ This does not use the real unquoting but what browsers are actually + using for quoting. + + .. versionadded:: 0.5 + + :param value: the header value to unquote. + :param is_filename: The value represents a filename or path. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dump_options_header( + header: t.Optional[str], options: t.Mapping[str, t.Optional[t.Union[str, int]]] +) -> str: + """The reverse function to :func:`parse_options_header`. + + :param header: the header to dump + :param options: a dict of options to append. + """ + segments = [] + if header is not None: + segments.append(header) + for key, value in options.items(): + if value is None: + segments.append(key) + else: + segments.append(f"{key}={quote_header_value(value)}") + return "; ".join(segments) + + +def dump_header( + iterable: t.Union[t.Dict[str, t.Union[str, int]], t.Iterable[str]], + allow_token: bool = True, +) -> str: + """Dump an HTTP header again. This is the reversal of + :func:`parse_list_header`, :func:`parse_set_header` and + :func:`parse_dict_header`. This also quotes strings that include an + equals sign unless you pass it as dict of key, value pairs. + + >>> dump_header({'foo': 'bar baz'}) + 'foo="bar baz"' + >>> dump_header(('foo', 'bar baz')) + 'foo, "bar baz"' + + :param iterable: the iterable or dict of values to quote. + :param allow_token: if set to `False` tokens as values are disallowed. + See :func:`quote_header_value` for more details. + """ + if isinstance(iterable, dict): + items = [] + for key, value in iterable.items(): + if value is None: + items.append(key) + else: + items.append( + f"{key}={quote_header_value(value, allow_token=allow_token)}" + ) + else: + items = [quote_header_value(x, allow_token=allow_token) for x in iterable] + return ", ".join(items) + + +def dump_csp_header(header: "ds.ContentSecurityPolicy") -> str: + """Dump a Content Security Policy header. + + These are structured into policies such as "default-src 'self'; + script-src 'self'". + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. + + """ + return "; ".join(f"{key} {value}" for key, value in header.items()) + + +def parse_list_header(value: str) -> t.List[str]: + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
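A short sketch of ``dump_options_header``, the counterpart of ``parse_options_header`` documented further below:

.. code-block:: python

    from werkzeug.http import dump_options_header

    dump_options_header("attachment", {"filename": "annual report.pdf"})
    # 'attachment; filename="annual report.pdf"'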
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +def parse_dict_header(value: str, cls: t.Type[dict] = dict) -> t.Dict[str, str]: + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict (or any other mapping object created from + the type with a dict like interface provided by the `cls` argument): + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + .. versionchanged:: 0.9 + Added support for `cls` argument. + + :param value: a string with a dict header. + :param cls: callable to use for storage of parsed results. + :return: an instance of `cls` + """ + result = cls() + if isinstance(value, bytes): + value = value.decode("latin1") + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +def parse_options_header(value: t.Optional[str]) -> t.Tuple[str, t.Dict[str, str]]: + """Parse a ``Content-Type``-like header into a tuple with the + value and any options: + + >>> parse_options_header('text/html; charset=utf8') + ('text/html', {'charset': 'utf8'}) + + This should is not for ``Cache-Control``-like headers, which use a + different format. For those, use :func:`parse_dict_header`. + + :param value: The header value to parse. + + .. versionchanged:: 2.2 + Option names are always converted to lowercase. + + .. versionchanged:: 2.1 + The ``multiple`` parameter is deprecated and will be removed in + Werkzeug 2.2. + + .. versionchanged:: 0.15 + :rfc:`2231` parameter continuations are handled. + + .. versionadded:: 0.5 + """ + if not value: + return "", {} + + result: t.List[t.Any] = [] + + value = "," + value.replace("\n", ",") + while value: + match = _option_header_start_mime_type.match(value) + if not match: + break + result.append(match.group(1)) # mimetype + options: t.Dict[str, str] = {} + # Parse options + rest = match.group(2) + encoding: t.Optional[str] + continued_encoding: t.Optional[str] = None + while rest: + optmatch = _option_header_piece_re.match(rest) + if not optmatch: + break + option, count, encoding, language, option_value = optmatch.groups() + # Continuations don't have to supply the encoding after the + # first line. If we're in a continuation, track the current + # encoding to use for subsequent lines. Reset it when the + # continuation ends. 
+ if not count: + continued_encoding = None + else: + if not encoding: + encoding = continued_encoding + continued_encoding = encoding + option = unquote_header_value(option).lower() + + if option_value is not None: + option_value = unquote_header_value(option_value, option == "filename") + + if encoding is not None: + option_value = _unquote(option_value).decode(encoding) + + if count: + # Continuations append to the existing value. For + # simplicity, this ignores the possibility of + # out-of-order indices, which shouldn't happen anyway. + if option_value is not None: + options[option] = options.get(option, "") + option_value + else: + options[option] = option_value # type: ignore[assignment] + + rest = rest[optmatch.end() :] + result.append(options) + return tuple(result) # type: ignore[return-value] + + return tuple(result) if result else ("", {}) # type: ignore[return-value] + + +_TAnyAccept = t.TypeVar("_TAnyAccept", bound="ds.Accept") + + +@typing.overload +def parse_accept_header(value: t.Optional[str]) -> "ds.Accept": + ... + + +@typing.overload +def parse_accept_header( + value: t.Optional[str], cls: t.Type[_TAnyAccept] +) -> _TAnyAccept: + ... + + +def parse_accept_header( + value: t.Optional[str], cls: t.Optional[t.Type[_TAnyAccept]] = None +) -> _TAnyAccept: + """Parses an HTTP Accept-* header. This does not implement a complete + valid algorithm but one that supports at least value and quality + extraction. + + Returns a new :class:`Accept` object (basically a list of ``(value, quality)`` + tuples sorted by the quality with some additional accessor methods). + + The second parameter can be a subclass of :class:`Accept` that is created + with the parsed values and returned. + + :param value: the accept header string to be parsed. + :param cls: the wrapper class for the return value (can be + :class:`Accept` or a subclass thereof) + :return: an instance of `cls`. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyAccept], ds.Accept) + + if not value: + return cls(None) + + result = [] + for match in _accept_re.finditer(value): + quality_match = match.group(2) + if not quality_match: + quality: float = 1 + else: + quality = max(min(float(quality_match), 1), 0) + result.append((match.group(1), quality)) + return cls(result) + + +_TAnyCC = t.TypeVar("_TAnyCC", bound="ds._CacheControl") +_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]] + + +@typing.overload +def parse_cache_control_header( + value: t.Optional[str], on_update: _t_cc_update, cls: None = None +) -> "ds.RequestCacheControl": + ... + + +@typing.overload +def parse_cache_control_header( + value: t.Optional[str], on_update: _t_cc_update, cls: t.Type[_TAnyCC] +) -> _TAnyCC: + ... + + +def parse_cache_control_header( + value: t.Optional[str], + on_update: _t_cc_update = None, + cls: t.Optional[t.Type[_TAnyCC]] = None, +) -> _TAnyCC: + """Parse a cache control header. The RFC differs between response and + request cache control, this method does not. It's your responsibility + to not use the wrong control statements. + + .. versionadded:: 0.5 + The `cls` was added. If not specified an immutable + :class:`~werkzeug.datastructures.RequestCacheControl` is returned. + + :param value: a cache control header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.CacheControl` + object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.RequestCacheControl` is used. + :return: a `cls` object. 
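A minimal sketch of content negotiation with ``parse_accept_header`` and the ``MIMEAccept`` subclass from ``werkzeug.datastructures``:

.. code-block:: python

    from werkzeug.datastructures import MIMEAccept
    from werkzeug.http import parse_accept_header

    accept = parse_accept_header("text/html,application/json;q=0.8", MIMEAccept)
    accept.best_match(["application/json", "text/html"])  # 'text/html'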
+ """ + if cls is None: + cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl) + + if not value: + return cls((), on_update) + + return cls(parse_dict_header(value), on_update) + + +_TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy") +_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]] + + +@typing.overload +def parse_csp_header( + value: t.Optional[str], on_update: _t_csp_update, cls: None = None +) -> "ds.ContentSecurityPolicy": + ... + + +@typing.overload +def parse_csp_header( + value: t.Optional[str], on_update: _t_csp_update, cls: t.Type[_TAnyCSP] +) -> _TAnyCSP: + ... + + +def parse_csp_header( + value: t.Optional[str], + on_update: _t_csp_update = None, + cls: t.Optional[t.Type[_TAnyCSP]] = None, +) -> _TAnyCSP: + """Parse a Content Security Policy header. + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. + + :param value: a csp header to be parsed. + :param on_update: an optional callable that is called every time a value + on the object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.ContentSecurityPolicy` is used. + :return: a `cls` object. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy) + + if value is None: + return cls((), on_update) + + items = [] + + for policy in value.split(";"): + policy = policy.strip() + + # Ignore badly formatted policies (no space) + if " " in policy: + directive, value = policy.strip().split(" ", 1) + items.append((directive.strip(), value.strip())) + + return cls(items, on_update) + + +def parse_set_header( + value: t.Optional[str], + on_update: t.Optional[t.Callable[["ds.HeaderSet"], None]] = None, +) -> "ds.HeaderSet": + """Parse a set-like header and return a + :class:`~werkzeug.datastructures.HeaderSet` object: + + >>> hs = parse_set_header('token, "quoted value"') + + The return value is an object that treats the items case-insensitively + and keeps the order of the items: + + >>> 'TOKEN' in hs + True + >>> hs.index('quoted value') + 1 + >>> hs + HeaderSet(['token', 'quoted value']) + + To create a header from the :class:`HeaderSet` again, use the + :func:`dump_header` function. + + :param value: a set header to be parsed. + :param on_update: an optional callable that is called every time a + value on the :class:`~werkzeug.datastructures.HeaderSet` + object is changed. + :return: a :class:`~werkzeug.datastructures.HeaderSet` + """ + if not value: + return ds.HeaderSet(None, on_update) + return ds.HeaderSet(parse_list_header(value), on_update) + + +def parse_authorization_header( + value: t.Optional[str], +) -> t.Optional["ds.Authorization"]: + """Parse an HTTP basic/digest authorization header transmitted by the web + browser. The return value is either `None` if the header was invalid or + not given, otherwise an :class:`~werkzeug.datastructures.Authorization` + object. + + :param value: the authorization header to parse. + :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. 
+ """ + if not value: + return None + value = _wsgi_decoding_dance(value) + try: + auth_type, auth_info = value.split(None, 1) + auth_type = auth_type.lower() + except ValueError: + return None + if auth_type == "basic": + try: + username, password = base64.b64decode(auth_info).split(b":", 1) + except Exception: + return None + try: + return ds.Authorization( + "basic", + { + "username": _to_str(username, "utf-8"), + "password": _to_str(password, "utf-8"), + }, + ) + except UnicodeDecodeError: + return None + elif auth_type == "digest": + auth_map = parse_dict_header(auth_info) + for key in "username", "realm", "nonce", "uri", "response": + if key not in auth_map: + return None + if "qop" in auth_map: + if not auth_map.get("nc") or not auth_map.get("cnonce"): + return None + return ds.Authorization("digest", auth_map) + return None + + +def parse_www_authenticate_header( + value: t.Optional[str], + on_update: t.Optional[t.Callable[["ds.WWWAuthenticate"], None]] = None, +) -> "ds.WWWAuthenticate": + """Parse an HTTP WWW-Authenticate header into a + :class:`~werkzeug.datastructures.WWWAuthenticate` object. + + :param value: a WWW-Authenticate header to parse. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.WWWAuthenticate` + object is changed. + :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. + """ + if not value: + return ds.WWWAuthenticate(on_update=on_update) + try: + auth_type, auth_info = value.split(None, 1) + auth_type = auth_type.lower() + except (ValueError, AttributeError): + return ds.WWWAuthenticate(value.strip().lower(), on_update=on_update) + return ds.WWWAuthenticate(auth_type, parse_dict_header(auth_info), on_update) + + +def parse_if_range_header(value: t.Optional[str]) -> "ds.IfRange": + """Parses an if-range header which can be an etag or a date. Returns + a :class:`~werkzeug.datastructures.IfRange` object. + + .. versionchanged:: 2.0 + If the value represents a datetime, it is timezone-aware. + + .. versionadded:: 0.7 + """ + if not value: + return ds.IfRange() + date = parse_date(value) + if date is not None: + return ds.IfRange(date=date) + # drop weakness information + return ds.IfRange(unquote_etag(value)[0]) + + +def parse_range_header( + value: t.Optional[str], make_inclusive: bool = True +) -> t.Optional["ds.Range"]: + """Parses a range header into a :class:`~werkzeug.datastructures.Range` + object. If the header is missing or malformed `None` is returned. + `ranges` is a list of ``(start, stop)`` tuples where the ranges are + non-inclusive. + + .. 
versionadded:: 0.7 + """ + if not value or "=" not in value: + return None + + ranges = [] + last_end = 0 + units, rng = value.split("=", 1) + units = units.strip().lower() + + for item in rng.split(","): + item = item.strip() + if "-" not in item: + return None + if item.startswith("-"): + if last_end < 0: + return None + try: + begin = int(item) + except ValueError: + return None + end = None + last_end = -1 + elif "-" in item: + begin_str, end_str = item.split("-", 1) + begin_str = begin_str.strip() + end_str = end_str.strip() + + try: + begin = int(begin_str) + except ValueError: + return None + + if begin < last_end or last_end < 0: + return None + if end_str: + try: + end = int(end_str) + 1 + except ValueError: + return None + + if begin >= end: + return None + else: + end = None + last_end = end if end is not None else -1 + ranges.append((begin, end)) + + return ds.Range(units, ranges) + + +def parse_content_range_header( + value: t.Optional[str], + on_update: t.Optional[t.Callable[["ds.ContentRange"], None]] = None, +) -> t.Optional["ds.ContentRange"]: + """Parses a range header into a + :class:`~werkzeug.datastructures.ContentRange` object or `None` if + parsing is not possible. + + .. versionadded:: 0.7 + + :param value: a content range header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.ContentRange` + object is changed. + """ + if value is None: + return None + try: + units, rangedef = (value or "").strip().split(None, 1) + except ValueError: + return None + + if "/" not in rangedef: + return None + rng, length_str = rangedef.split("/", 1) + if length_str == "*": + length = None + else: + try: + length = int(length_str) + except ValueError: + return None + + if rng == "*": + return ds.ContentRange(units, None, None, length, on_update=on_update) + elif "-" not in rng: + return None + + start_str, stop_str = rng.split("-", 1) + try: + start = int(start_str) + stop = int(stop_str) + 1 + except ValueError: + return None + + if is_byte_range_valid(start, stop, length): + return ds.ContentRange(units, start, stop, length, on_update=on_update) + + return None + + +def quote_etag(etag: str, weak: bool = False) -> str: + """Quote an etag. + + :param etag: the etag to quote. + :param weak: set to `True` to tag it "weak". + """ + if '"' in etag: + raise ValueError("invalid etag") + etag = f'"{etag}"' + if weak: + etag = f"W/{etag}" + return etag + + +def unquote_etag( + etag: t.Optional[str], +) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]: + """Unquote a single etag: + + >>> unquote_etag('W/"bar"') + ('bar', True) + >>> unquote_etag('"bar"') + ('bar', False) + + :param etag: the etag identifier to unquote. + :return: a ``(etag, weak)`` tuple. + """ + if not etag: + return None, None + etag = etag.strip() + weak = False + if etag.startswith(("W/", "w/")): + weak = True + etag = etag[2:] + if etag[:1] == etag[-1:] == '"': + etag = etag[1:-1] + return etag, weak + + +def parse_etags(value: t.Optional[str]) -> "ds.ETags": + """Parse an etag header. + + :param value: the tag header to parse + :return: an :class:`~werkzeug.datastructures.ETags` object. 
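A small sketch of ``parse_range_header``; note the parsed ranges are stored with an exclusive ``stop``:

.. code-block:: python

    from werkzeug.http import parse_range_header

    rng = parse_range_header("bytes=0-499")
    rng.units                   # 'bytes'
    rng.ranges                  # [(0, 500)] -- stop is exclusive
    rng.range_for_length(1000)  # (0, 500)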
+ """ + if not value: + return ds.ETags() + strong = [] + weak = [] + end = len(value) + pos = 0 + while pos < end: + match = _etag_re.match(value, pos) + if match is None: + break + is_weak, quoted, raw = match.groups() + if raw == "*": + return ds.ETags(star_tag=True) + elif quoted: + raw = quoted + if is_weak: + weak.append(raw) + else: + strong.append(raw) + pos = match.end() + return ds.ETags(strong, weak) + + +def generate_etag(data: bytes) -> str: + """Generate an etag for some data. + + .. versionchanged:: 2.0 + Use SHA-1. MD5 may not be available in some environments. + """ + return sha1(data).hexdigest() + + +def parse_date(value: t.Optional[str]) -> t.Optional[datetime]: + """Parse an :rfc:`2822` date into a timezone-aware + :class:`datetime.datetime` object, or ``None`` if parsing fails. + + This is a wrapper for :func:`email.utils.parsedate_to_datetime`. It + returns ``None`` if parsing fails instead of raising an exception, + and always returns a timezone-aware datetime object. If the string + doesn't have timezone information, it is assumed to be UTC. + + :param value: A string with a supported date format. + + .. versionchanged:: 2.0 + Return a timezone-aware datetime object. Use + ``email.utils.parsedate_to_datetime``. + """ + if value is None: + return None + + try: + dt = email.utils.parsedate_to_datetime(value) + except (TypeError, ValueError): + return None + + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + + return dt + + +def http_date( + timestamp: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None +) -> str: + """Format a datetime object or timestamp into an :rfc:`2822` date + string. + + This is a wrapper for :func:`email.utils.format_datetime`. It + assumes naive datetime objects are in UTC instead of raising an + exception. + + :param timestamp: The datetime or timestamp to format. Defaults to + the current time. + + .. versionchanged:: 2.0 + Use ``email.utils.format_datetime``. Accept ``date`` objects. + """ + if isinstance(timestamp, date): + if not isinstance(timestamp, datetime): + # Assume plain date is midnight UTC. + timestamp = datetime.combine(timestamp, time(), tzinfo=timezone.utc) + else: + # Ensure datetime is timezone-aware. + timestamp = _dt_as_utc(timestamp) + + return email.utils.format_datetime(timestamp, usegmt=True) + + if isinstance(timestamp, struct_time): + timestamp = mktime(timestamp) + + return email.utils.formatdate(timestamp, usegmt=True) + + +def parse_age(value: t.Optional[str] = None) -> t.Optional[timedelta]: + """Parses a base-10 integer count of seconds into a timedelta. + + If parsing fails, the return value is `None`. + + :param value: a string consisting of an integer represented in base-10 + :return: a :class:`datetime.timedelta` object or `None`. + """ + if not value: + return None + try: + seconds = int(value) + except ValueError: + return None + if seconds < 0: + return None + try: + return timedelta(seconds=seconds) + except OverflowError: + return None + + +def dump_age(age: t.Optional[t.Union[timedelta, int]] = None) -> t.Optional[str]: + """Formats the duration as a base-10 integer. + + :param age: should be an integer number of seconds, + a :class:`datetime.timedelta` object, or, + if the age is unknown, `None` (default). 
+ """ + if age is None: + return None + if isinstance(age, timedelta): + age = int(age.total_seconds()) + else: + age = int(age) + + if age < 0: + raise ValueError("age cannot be negative") + + return str(age) + + +def is_resource_modified( + environ: "WSGIEnvironment", + etag: t.Optional[str] = None, + data: t.Optional[bytes] = None, + last_modified: t.Optional[t.Union[datetime, str]] = None, + ignore_if_range: bool = True, +) -> bool: + """Convenience method for conditional requests. + + :param environ: the WSGI environment of the request to be checked. + :param etag: the etag for the response for comparison. + :param data: or alternatively the data of the response to automatically + generate an etag using :func:`generate_etag`. + :param last_modified: an optional date of the last modification. + :param ignore_if_range: If `False`, `If-Range` header will be taken into + account. + :return: `True` if the resource was modified, otherwise `False`. + + .. versionchanged:: 2.0 + SHA-1 is used to generate an etag value for the data. MD5 may + not be available in some environments. + + .. versionchanged:: 1.0.0 + The check is run for methods other than ``GET`` and ``HEAD``. + """ + return _sansio_http.is_resource_modified( + http_range=environ.get("HTTP_RANGE"), + http_if_range=environ.get("HTTP_IF_RANGE"), + http_if_modified_since=environ.get("HTTP_IF_MODIFIED_SINCE"), + http_if_none_match=environ.get("HTTP_IF_NONE_MATCH"), + http_if_match=environ.get("HTTP_IF_MATCH"), + etag=etag, + data=data, + last_modified=last_modified, + ignore_if_range=ignore_if_range, + ) + + +def remove_entity_headers( + headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]], + allowed: t.Iterable[str] = ("expires", "content-location"), +) -> None: + """Remove all entity headers from a list or :class:`Headers` object. This + operation works in-place. `Expires` and `Content-Location` headers are + by default not removed. The reason for this is :rfc:`2616` section + 10.3.5 which specifies some entity headers that should be sent. + + .. versionchanged:: 0.5 + added `allowed` parameter. + + :param headers: a list or :class:`Headers` object. + :param allowed: a list of headers that should still be allowed even though + they are entity headers. + """ + allowed = {x.lower() for x in allowed} + headers[:] = [ + (key, value) + for key, value in headers + if not is_entity_header(key) or key.lower() in allowed + ] + + +def remove_hop_by_hop_headers( + headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]] +) -> None: + """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or + :class:`Headers` object. This operation works in-place. + + .. versionadded:: 0.5 + + :param headers: a list or :class:`Headers` object. + """ + headers[:] = [ + (key, value) for key, value in headers if not is_hop_by_hop_header(key) + ] + + +def is_entity_header(header: str) -> bool: + """Check if a header is an entity header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an entity header, `False` otherwise. + """ + return header.lower() in _entity_headers + + +def is_hop_by_hop_header(header: str) -> bool: + """Check if a header is an HTTP/1.1 "Hop-by-Hop" header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an HTTP/1.1 "Hop-by-Hop" header, `False` otherwise. 
+ """ + return header.lower() in _hop_by_hop_headers + + +def parse_cookie( + header: t.Union["WSGIEnvironment", str, bytes, None], + charset: str = "utf-8", + errors: str = "replace", + cls: t.Optional[t.Type["ds.MultiDict"]] = None, +) -> "ds.MultiDict[str, str]": + """Parse a cookie from a string or WSGI environ. + + The same key can be provided multiple times, the values are stored + in-order. The default :class:`MultiDict` will have the first value + first, and all values can be retrieved with + :meth:`MultiDict.getlist`. + + :param header: The cookie header as a string, or a WSGI environ dict + with a ``HTTP_COOKIE`` key. + :param charset: The charset for the cookie values. + :param errors: The error behavior for the charset decoding. + :param cls: A dict-like class to store the parsed cookies in. + Defaults to :class:`MultiDict`. + + .. versionchanged:: 1.0.0 + Returns a :class:`MultiDict` instead of a + ``TypeConversionDict``. + + .. versionchanged:: 0.5 + Returns a :class:`TypeConversionDict` instead of a regular dict. + The ``cls`` parameter was added. + """ + if isinstance(header, dict): + cookie = header.get("HTTP_COOKIE", "") + elif header is None: + cookie = "" + else: + cookie = header + + return _sansio_http.parse_cookie( + cookie=cookie, charset=charset, errors=errors, cls=cls + ) + + +def dump_cookie( + key: str, + value: t.Union[bytes, str] = "", + max_age: t.Optional[t.Union[timedelta, int]] = None, + expires: t.Optional[t.Union[str, datetime, int, float]] = None, + path: t.Optional[str] = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + charset: str = "utf-8", + sync_expires: bool = True, + max_size: int = 4093, + samesite: t.Optional[str] = None, +) -> str: + """Create a Set-Cookie header without the ``Set-Cookie`` prefix. + + The return value is usually restricted to ascii as the vast majority + of values are properly escaped, but that is no guarantee. It's + tunneled through latin1 as required by :pep:`3333`. + + The return value is not ASCII safe if the key contains unicode + characters. This is technically against the specification but + happens in the wild. It's strongly recommended to not use + non-ASCII values for the keys. + + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. Additionally `timedelta` objects + are accepted, too. + :param expires: should be a `datetime` object or unix timestamp. + :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: Use this if you want to set a cross-domain cookie. For + example, ``domain=".example.com"`` will set a cookie + that is readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: The cookie will only be available via HTTPS + :param httponly: disallow JavaScript to access the cookie. This is an + extension to the cookie standard and probably not + supported by all browsers. + :param charset: the encoding for string values. + :param sync_expires: automatically set expires if max_age is defined + but expires not. + :param max_size: Warn if the final header value exceeds this size. The + default, 4093, should be safely `supported by most browsers + `_. Set to 0 to disable this check. + :param samesite: Limits the scope of the cookie such that it will + only be attached to requests if those requests are same-site. + + .. 
_`cookie`: http://browsercookielimits.squawky.net/ + + .. versionchanged:: 1.0.0 + The string ``'None'`` is accepted for ``samesite``. + """ + key = _to_bytes(key, charset) + value = _to_bytes(value, charset) + + if path is not None: + from .urls import iri_to_uri + + path = iri_to_uri(path, charset) + + domain = _make_cookie_domain(domain) + + if isinstance(max_age, timedelta): + max_age = int(max_age.total_seconds()) + + if expires is not None: + if not isinstance(expires, str): + expires = http_date(expires) + elif max_age is not None and sync_expires: + expires = http_date(datetime.now(tz=timezone.utc).timestamp() + max_age) + + if samesite is not None: + samesite = samesite.title() + + if samesite not in {"Strict", "Lax", "None"}: + raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.") + + buf = [key + b"=" + _cookie_quote(value)] + + # XXX: In theory all of these parameters that are not marked with `None` + # should be quoted. Because stdlib did not quote it before I did not + # want to introduce quoting there now. + for k, v, q in ( + (b"Domain", domain, True), + (b"Expires", expires, False), + (b"Max-Age", max_age, False), + (b"Secure", secure, None), + (b"HttpOnly", httponly, None), + (b"Path", path, False), + (b"SameSite", samesite, False), + ): + if q is None: + if v: + buf.append(k) + continue + + if v is None: + continue + + tmp = bytearray(k) + if not isinstance(v, (bytes, bytearray)): + v = _to_bytes(str(v), charset) + if q: + v = _cookie_quote(v) + tmp += b"=" + v + buf.append(bytes(tmp)) + + # The return value will be an incorrectly encoded latin1 header for + # consistency with the headers object. + rv = b"; ".join(buf) + rv = rv.decode("latin1") + + # Warn if the final value of the cookie is larger than the limit. If the + # cookie is too large, then it may be silently ignored by the browser, + # which can be quite hard to debug. + cookie_size = len(rv) + + if max_size and cookie_size > max_size: + value_size = len(value) + warnings.warn( + f"The {key.decode(charset)!r} cookie is too large: the value was" + f" {value_size} bytes but the" + f" header required {cookie_size - value_size} extra bytes. The final size" + f" was {cookie_size} bytes but the limit is {max_size} bytes. Browsers may" + f" silently ignore cookies larger than this.", + stacklevel=2, + ) + + return rv + + +def is_byte_range_valid( + start: t.Optional[int], stop: t.Optional[int], length: t.Optional[int] +) -> bool: + """Checks if a given byte content range is valid for the given length. + + .. versionadded:: 0.7 + """ + if (start is None) != (stop is None): + return False + elif start is None: + return length is None or length >= 0 + elif length is None: + return 0 <= start < stop # type: ignore + elif start >= stop: # type: ignore + return False + return 0 <= start < length + + +# circular dependencies +from . 
import datastructures as ds +from .sansio import http as _sansio_http diff --git a/venv/lib/python3.8/site-packages/werkzeug/local.py b/venv/lib/python3.8/site-packages/werkzeug/local.py new file mode 100644 index 0000000..70e9bf7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/local.py @@ -0,0 +1,648 @@ +import copy +import math +import operator +import typing as t +from contextvars import ContextVar +from functools import partial +from functools import update_wrapper +from operator import attrgetter + +from .wsgi import ClosingIterator + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + +T = t.TypeVar("T") +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + + +def release_local(local: t.Union["Local", "LocalStack"]) -> None: + """Release the data for the current context in a :class:`Local` or + :class:`LocalStack` without using a :class:`LocalManager`. + + This should not be needed for modern use cases, and may be removed + in the future. + + .. versionadded:: 0.6.1 + """ + local.__release_local__() + + +class Local: + """Create a namespace of context-local data. This wraps a + :class:`ContextVar` containing a :class:`dict` value. + + This may incur a performance penalty compared to using individual + context vars, as it has to copy data to avoid mutating the dict + between nested contexts. + + :param context_var: The :class:`~contextvars.ContextVar` to use as + storage for this local. If not given, one will be created. + Context vars not created at the global scope may interfere with + garbage collection. + + .. versionchanged:: 2.0 + Uses ``ContextVar`` instead of a custom storage implementation. + """ + + __slots__ = ("__storage",) + + def __init__( + self, context_var: t.Optional[ContextVar[t.Dict[str, t.Any]]] = None + ) -> None: + if context_var is None: + # A ContextVar not created at global scope interferes with + # Python's garbage collection. However, a local only makes + # sense defined at the global scope as well, in which case + # the GC issue doesn't seem relevant. + context_var = ContextVar(f"werkzeug.Local<{id(self)}>.storage") + + object.__setattr__(self, "_Local__storage", context_var) + + def __iter__(self) -> t.Iterator[t.Tuple[str, t.Any]]: + return iter(self.__storage.get({}).items()) + + def __call__( + self, name: str, *, unbound_message: t.Optional[str] = None + ) -> "LocalProxy": + """Create a :class:`LocalProxy` that access an attribute on this + local namespace. + + :param name: Proxy this attribute. + :param unbound_message: The error message that the proxy will + show if the attribute isn't set. + """ + return LocalProxy(self, name, unbound_message=unbound_message) + + def __release_local__(self) -> None: + self.__storage.set({}) + + def __getattr__(self, name: str) -> t.Any: + values = self.__storage.get({}) + + if name in values: + return values[name] + + raise AttributeError(name) + + def __setattr__(self, name: str, value: t.Any) -> None: + values = self.__storage.get({}).copy() + values[name] = value + self.__storage.set(values) + + def __delattr__(self, name: str) -> None: + values = self.__storage.get({}) + + if name in values: + values = values.copy() + del values[name] + self.__storage.set(values) + else: + raise AttributeError(name) + + +class LocalStack(t.Generic[T]): + """Create a stack of context-local data. This wraps a + :class:`ContextVar` containing a :class:`list` value. 
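A minimal sketch of the :class:`Local` namespace in use:

.. code-block:: python

    from werkzeug.local import Local, release_local

    ctx = Local()
    ctx.user = "alice"   # stored in the ContextVar-backed dict
    ctx.user             # 'alice'
    release_local(ctx)   # clears the data for the current context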
+ + This may incur a performance penalty compared to using individual + context vars, as it has to copy data to avoid mutating the list + between nested contexts. + + :param context_var: The :class:`~contextvars.ContextVar` to use as + storage for this local. If not given, one will be created. + Context vars not created at the global scope may interfere with + garbage collection. + + .. versionchanged:: 2.0 + Uses ``ContextVar`` instead of a custom storage implementation. + + .. versionadded:: 0.6.1 + """ + + __slots__ = ("_storage",) + + def __init__(self, context_var: t.Optional[ContextVar[t.List[T]]] = None) -> None: + if context_var is None: + # A ContextVar not created at global scope interferes with + # Python's garbage collection. However, a local only makes + # sense defined at the global scope as well, in which case + # the GC issue doesn't seem relevant. + context_var = ContextVar(f"werkzeug.LocalStack<{id(self)}>.storage") + + self._storage = context_var + + def __release_local__(self) -> None: + self._storage.set([]) + + def push(self, obj: T) -> t.List[T]: + """Add a new item to the top of the stack.""" + stack = self._storage.get([]).copy() + stack.append(obj) + self._storage.set(stack) + return stack + + def pop(self) -> t.Optional[T]: + """Remove the top item from the stack and return it. If the + stack is empty, return ``None``. + """ + stack = self._storage.get([]) + + if len(stack) == 0: + return None + + rv = stack[-1] + self._storage.set(stack[:-1]) + return rv + + @property + def top(self) -> t.Optional[T]: + """The topmost item on the stack. If the stack is empty, + `None` is returned. + """ + stack = self._storage.get([]) + + if len(stack) == 0: + return None + + return stack[-1] + + def __call__( + self, name: t.Optional[str] = None, *, unbound_message: t.Optional[str] = None + ) -> "LocalProxy": + """Create a :class:`LocalProxy` that accesses the top of this + local stack. + + :param name: If given, the proxy access this attribute of the + top item, rather than the item itself. + :param unbound_message: The error message that the proxy will + show if the stack is empty. + """ + return LocalProxy(self, name, unbound_message=unbound_message) + + +class LocalManager: + """Manage releasing the data for the current context in one or more + :class:`Local` and :class:`LocalStack` objects. + + This should not be needed for modern use cases, and may be removed + in the future. + + :param locals: A local or list of locals to manage. + + .. versionchanged:: 2.0 + ``ident_func`` is deprecated and will be removed in Werkzeug + 2.1. + + .. versionchanged:: 0.7 + The ``ident_func`` parameter was added. + + .. versionchanged:: 0.6.1 + The :func:`release_local` function can be used instead of a + manager. + """ + + __slots__ = ("locals",) + + def __init__( + self, + locals: t.Optional[ + t.Union[Local, LocalStack, t.Iterable[t.Union[Local, LocalStack]]] + ] = None, + ) -> None: + if locals is None: + self.locals = [] + elif isinstance(locals, Local): + self.locals = [locals] + else: + self.locals = list(locals) # type: ignore[arg-type] + + def cleanup(self) -> None: + """Release the data in the locals for this context. Call this at + the end of each request or use :meth:`make_middleware`. + """ + for local in self.locals: + release_local(local) + + def make_middleware(self, app: "WSGIApplication") -> "WSGIApplication": + """Wrap a WSGI application so that local data is released + automatically after the response has been sent for a request. 
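And a corresponding sketch of :class:`LocalStack`:

.. code-block:: python

    from werkzeug.local import LocalStack

    stack = LocalStack()
    stack.push({"request": "r1"})
    stack.top    # {'request': 'r1'}
    stack.pop()  # {'request': 'r1'}
    stack.top    # None once the stack is empty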
+ """ + + def application( + environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + return ClosingIterator(app(environ, start_response), self.cleanup) + + return application + + def middleware(self, func: "WSGIApplication") -> "WSGIApplication": + """Like :meth:`make_middleware` but used as a decorator on the + WSGI application function. + + .. code-block:: python + + @manager.middleware + def application(environ, start_response): + ... + """ + return update_wrapper(self.make_middleware(func), func) + + def __repr__(self) -> str: + return f"<{type(self).__name__} storages: {len(self.locals)}>" + + +class _ProxyLookup: + """Descriptor that handles proxied attribute lookup for + :class:`LocalProxy`. + + :param f: The built-in function this attribute is accessed through. + Instead of looking up the special method, the function call + is redone on the object. + :param fallback: Return this function if the proxy is unbound + instead of raising a :exc:`RuntimeError`. + :param is_attr: This proxied name is an attribute, not a function. + Call the fallback immediately to get the value. + :param class_value: Value to return when accessed from the + ``LocalProxy`` class directly. Used for ``__doc__`` so building + docs still works. + """ + + __slots__ = ("bind_f", "fallback", "is_attr", "class_value", "name") + + def __init__( + self, + f: t.Optional[t.Callable] = None, + fallback: t.Optional[t.Callable] = None, + class_value: t.Optional[t.Any] = None, + is_attr: bool = False, + ) -> None: + bind_f: t.Optional[t.Callable[["LocalProxy", t.Any], t.Callable]] + + if hasattr(f, "__get__"): + # A Python function, can be turned into a bound method. + + def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: + return f.__get__(obj, type(obj)) # type: ignore + + elif f is not None: + # A C function, use partial to bind the first argument. + + def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: + return partial(f, obj) # type: ignore + + else: + # Use getattr, which will produce a bound method. + bind_f = None + + self.bind_f = bind_f + self.fallback = fallback + self.class_value = class_value + self.is_attr = is_attr + + def __set_name__(self, owner: "LocalProxy", name: str) -> None: + self.name = name + + def __get__(self, instance: "LocalProxy", owner: t.Optional[type] = None) -> t.Any: + if instance is None: + if self.class_value is not None: + return self.class_value + + return self + + try: + obj = instance._get_current_object() # type: ignore[misc] + except RuntimeError: + if self.fallback is None: + raise + + fallback = self.fallback.__get__(instance, owner) + + if self.is_attr: + # __class__ and __doc__ are attributes, not methods. + # Call the fallback to get the value. + return fallback() + + return fallback + + if self.bind_f is not None: + return self.bind_f(instance, obj) + + return getattr(obj, self.name) + + def __repr__(self) -> str: + return f"proxy {self.name}" + + def __call__(self, instance: "LocalProxy", *args: t.Any, **kwargs: t.Any) -> t.Any: + """Support calling unbound methods from the class. For example, + this happens with ``copy.copy``, which does + ``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it + returns the proxy type and descriptor. + """ + return self.__get__(instance, type(instance))(*args, **kwargs) + + +class _ProxyIOp(_ProxyLookup): + """Look up an augmented assignment method on a proxied object. The + method is wrapped to return the proxy instead of the object. 
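For example, an in-place operation mutates the proxied object while the
name stays bound to the proxy (a sketch, using a plain list as the
proxied value):

.. code-block:: python

    from contextvars import ContextVar
    from werkzeug.local import LocalProxy

    _items_var = ContextVar("items")
    _items_var.set([1, 2])
    items = LocalProxy(_items_var)

    items += [3]                                # list.__iadd__ runs on the real list
    assert _items_var.get() == [1, 2, 3]
    assert issubclass(type(items), LocalProxy)  # still the proxy, not the list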
+ """ + + __slots__ = () + + def __init__( + self, f: t.Optional[t.Callable] = None, fallback: t.Optional[t.Callable] = None + ) -> None: + super().__init__(f, fallback) + + def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: + def i_op(self: t.Any, other: t.Any) -> "LocalProxy": + f(self, other) # type: ignore + return instance + + return i_op.__get__(obj, type(obj)) # type: ignore + + self.bind_f = bind_f + + +def _l_to_r_op(op: F) -> F: + """Swap the argument order to turn an l-op into an r-op.""" + + def r_op(obj: t.Any, other: t.Any) -> t.Any: + return op(other, obj) + + return t.cast(F, r_op) + + +def _identity(o: T) -> T: + return o + + +class LocalProxy(t.Generic[T]): + """A proxy to the object bound to a context-local object. All + operations on the proxy are forwarded to the bound object. If no + object is bound, a ``RuntimeError`` is raised. + + :param local: The context-local object that provides the proxied + object. + :param name: Proxy this attribute from the proxied object. + :param unbound_message: The error message to show if the + context-local object is unbound. + + Proxy a :class:`~contextvars.ContextVar` to make it easier to + access. Pass a name to proxy that attribute. + + .. code-block:: python + + _request_var = ContextVar("request") + request = LocalProxy(_request_var) + session = LocalProxy(_request_var, "session") + + Proxy an attribute on a :class:`Local` namespace by calling the + local with the attribute name: + + .. code-block:: python + + data = Local() + user = data("user") + + Proxy the top item on a :class:`LocalStack` by calling the local. + Pass a name to proxy that attribute. + + .. code-block:: + + app_stack = LocalStack() + current_app = app_stack() + g = app_stack("g") + + Pass a function to proxy the return value from that function. This + was previously used to access attributes of local objects before + that was supported directly. + + .. code-block:: python + + session = LocalProxy(lambda: request.session) + + ``__repr__`` and ``__class__`` are proxied, so ``repr(x)`` and + ``isinstance(x, cls)`` will look like the proxied object. Use + ``issubclass(type(x), LocalProxy)`` to check if an object is a + proxy. + + .. code-block:: python + + repr(user) # + isinstance(user, User) # True + issubclass(type(user), LocalProxy) # True + + .. versionchanged:: 2.2.2 + ``__wrapped__`` is set when wrapping an object, not only when + wrapping a function, to prevent doctest from failing. + + .. versionchanged:: 2.2 + Can proxy a ``ContextVar`` or ``LocalStack`` directly. + + .. versionchanged:: 2.2 + The ``name`` parameter can be used with any proxied object, not + only ``Local``. + + .. versionchanged:: 2.2 + Added the ``unbound_message`` parameter. + + .. versionchanged:: 2.0 + Updated proxied attributes and methods to reflect the current + data model. + + .. versionchanged:: 0.6.1 + The class can be instantiated with a callable. + """ + + __slots__ = ("__wrapped", "_get_current_object") + + _get_current_object: t.Callable[[], T] + """Return the current object this proxy is bound to. If the proxy is + unbound, this raises a ``RuntimeError``. + + This should be used if you need to pass the object to something that + doesn't understand the proxy. It can also be useful for performance + if you are accessing the object multiple times in a function, rather + than going through the proxy multiple times. 
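A short sketch of unwrapping a proxy (the names are chosen for
illustration only):

.. code-block:: python

    from contextvars import ContextVar
    from werkzeug.local import LocalProxy

    _request_var = ContextVar("request")
    request = LocalProxy(_request_var, unbound_message="request is not bound")

    _request_var.set({"path": "/"})
    real = request._get_current_object()   # the plain dict, not the proxy
    assert real is _request_var.get()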
+ """ + + def __init__( + self, + local: t.Union[ContextVar[T], Local, LocalStack[T], t.Callable[[], T]], + name: t.Optional[str] = None, + *, + unbound_message: t.Optional[str] = None, + ) -> None: + if name is None: + get_name = _identity + else: + get_name = attrgetter(name) # type: ignore[assignment] + + if unbound_message is None: + unbound_message = "object is not bound" + + if isinstance(local, Local): + if name is None: + raise TypeError("'name' is required when proxying a 'Local' object.") + + def _get_current_object() -> T: + try: + return get_name(local) # type: ignore[return-value] + except AttributeError: + raise RuntimeError(unbound_message) from None + + elif isinstance(local, LocalStack): + + def _get_current_object() -> T: + obj = local.top # type: ignore[union-attr] + + if obj is None: + raise RuntimeError(unbound_message) + + return get_name(obj) + + elif isinstance(local, ContextVar): + + def _get_current_object() -> T: + try: + obj = local.get() # type: ignore[union-attr] + except LookupError: + raise RuntimeError(unbound_message) from None + + return get_name(obj) + + elif callable(local): + + def _get_current_object() -> T: + return get_name(local()) # type: ignore + + else: + raise TypeError(f"Don't know how to proxy '{type(local)}'.") + + object.__setattr__(self, "_LocalProxy__wrapped", local) + object.__setattr__(self, "_get_current_object", _get_current_object) + + __doc__ = _ProxyLookup( # type: ignore + class_value=__doc__, fallback=lambda self: type(self).__doc__, is_attr=True + ) + __wrapped__ = _ProxyLookup( + fallback=lambda self: self._LocalProxy__wrapped, is_attr=True + ) + # __del__ should only delete the proxy + __repr__ = _ProxyLookup( # type: ignore + repr, fallback=lambda self: f"<{type(self).__name__} unbound>" + ) + __str__ = _ProxyLookup(str) # type: ignore + __bytes__ = _ProxyLookup(bytes) + __format__ = _ProxyLookup() # type: ignore + __lt__ = _ProxyLookup(operator.lt) + __le__ = _ProxyLookup(operator.le) + __eq__ = _ProxyLookup(operator.eq) # type: ignore + __ne__ = _ProxyLookup(operator.ne) # type: ignore + __gt__ = _ProxyLookup(operator.gt) + __ge__ = _ProxyLookup(operator.ge) + __hash__ = _ProxyLookup(hash) # type: ignore + __bool__ = _ProxyLookup(bool, fallback=lambda self: False) + __getattr__ = _ProxyLookup(getattr) + # __getattribute__ triggered through __getattr__ + __setattr__ = _ProxyLookup(setattr) # type: ignore + __delattr__ = _ProxyLookup(delattr) # type: ignore + __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore + # __get__ (proxying descriptor not supported) + # __set__ (descriptor) + # __delete__ (descriptor) + # __set_name__ (descriptor) + # __objclass__ (descriptor) + # __slots__ used by proxy itself + # __dict__ (__getattr__) + # __weakref__ (__getattr__) + # __init_subclass__ (proxying metaclass not supported) + # __prepare__ (metaclass) + __class__ = _ProxyLookup( + fallback=lambda self: type(self), is_attr=True + ) # type: ignore + __instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self)) + __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self)) + # __class_getitem__ triggered through __getitem__ + __call__ = _ProxyLookup(lambda self, *args, **kwargs: self(*args, **kwargs)) + __len__ = _ProxyLookup(len) + __length_hint__ = _ProxyLookup(operator.length_hint) + __getitem__ = _ProxyLookup(operator.getitem) + __setitem__ = _ProxyLookup(operator.setitem) + __delitem__ = _ProxyLookup(operator.delitem) + # __missing__ triggered through __getitem__ + __iter__ = 
_ProxyLookup(iter) + __next__ = _ProxyLookup(next) + __reversed__ = _ProxyLookup(reversed) + __contains__ = _ProxyLookup(operator.contains) + __add__ = _ProxyLookup(operator.add) + __sub__ = _ProxyLookup(operator.sub) + __mul__ = _ProxyLookup(operator.mul) + __matmul__ = _ProxyLookup(operator.matmul) + __truediv__ = _ProxyLookup(operator.truediv) + __floordiv__ = _ProxyLookup(operator.floordiv) + __mod__ = _ProxyLookup(operator.mod) + __divmod__ = _ProxyLookup(divmod) + __pow__ = _ProxyLookup(pow) + __lshift__ = _ProxyLookup(operator.lshift) + __rshift__ = _ProxyLookup(operator.rshift) + __and__ = _ProxyLookup(operator.and_) + __xor__ = _ProxyLookup(operator.xor) + __or__ = _ProxyLookup(operator.or_) + __radd__ = _ProxyLookup(_l_to_r_op(operator.add)) + __rsub__ = _ProxyLookup(_l_to_r_op(operator.sub)) + __rmul__ = _ProxyLookup(_l_to_r_op(operator.mul)) + __rmatmul__ = _ProxyLookup(_l_to_r_op(operator.matmul)) + __rtruediv__ = _ProxyLookup(_l_to_r_op(operator.truediv)) + __rfloordiv__ = _ProxyLookup(_l_to_r_op(operator.floordiv)) + __rmod__ = _ProxyLookup(_l_to_r_op(operator.mod)) + __rdivmod__ = _ProxyLookup(_l_to_r_op(divmod)) + __rpow__ = _ProxyLookup(_l_to_r_op(pow)) + __rlshift__ = _ProxyLookup(_l_to_r_op(operator.lshift)) + __rrshift__ = _ProxyLookup(_l_to_r_op(operator.rshift)) + __rand__ = _ProxyLookup(_l_to_r_op(operator.and_)) + __rxor__ = _ProxyLookup(_l_to_r_op(operator.xor)) + __ror__ = _ProxyLookup(_l_to_r_op(operator.or_)) + __iadd__ = _ProxyIOp(operator.iadd) + __isub__ = _ProxyIOp(operator.isub) + __imul__ = _ProxyIOp(operator.imul) + __imatmul__ = _ProxyIOp(operator.imatmul) + __itruediv__ = _ProxyIOp(operator.itruediv) + __ifloordiv__ = _ProxyIOp(operator.ifloordiv) + __imod__ = _ProxyIOp(operator.imod) + __ipow__ = _ProxyIOp(operator.ipow) + __ilshift__ = _ProxyIOp(operator.ilshift) + __irshift__ = _ProxyIOp(operator.irshift) + __iand__ = _ProxyIOp(operator.iand) + __ixor__ = _ProxyIOp(operator.ixor) + __ior__ = _ProxyIOp(operator.ior) + __neg__ = _ProxyLookup(operator.neg) + __pos__ = _ProxyLookup(operator.pos) + __abs__ = _ProxyLookup(abs) + __invert__ = _ProxyLookup(operator.invert) + __complex__ = _ProxyLookup(complex) + __int__ = _ProxyLookup(int) + __float__ = _ProxyLookup(float) + __index__ = _ProxyLookup(operator.index) + __round__ = _ProxyLookup(round) + __trunc__ = _ProxyLookup(math.trunc) + __floor__ = _ProxyLookup(math.floor) + __ceil__ = _ProxyLookup(math.ceil) + __enter__ = _ProxyLookup() + __exit__ = _ProxyLookup() + __await__ = _ProxyLookup() + __aiter__ = _ProxyLookup() + __anext__ = _ProxyLookup() + __aenter__ = _ProxyLookup() + __aexit__ = _ProxyLookup() + __copy__ = _ProxyLookup(copy.copy) + __deepcopy__ = _ProxyLookup(copy.deepcopy) + # __getnewargs_ex__ (pickle through proxy not supported) + # __getnewargs__ (pickle) + # __getstate__ (pickle) + # __setstate__ (pickle) + # __reduce__ (pickle) + # __reduce_ex__ (pickle) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py new file mode 120000 index 0000000..0cadbc6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a9/fa/a0/753e67a701bdb1eb37f855d025fdda07de4960fd737217ad1ff4f73d4c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 
0000000..9b15758 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc new file mode 100644 index 0000000..3e02cb5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc new file mode 100644 index 0000000..f112f04 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/lint.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/lint.cpython-38.pyc new file mode 100644 index 0000000..afafe91 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/lint.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-38.pyc new file mode 100644 index 0000000..c40f61c Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc new file mode 100644 index 0000000..cadc9a4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc new file mode 100644 index 0000000..273e2e6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py new file mode 120000 index 0000000..febffd2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/16/1f/f0/f8ac969d349817e2dfbf9762990ed31d24bb69f3c064c9af738cc5d608 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py new file mode 120000 index 0000000..6035f82 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1c/4f/15/ca14bb091f84d4eebff5bebc86ebfc14b806191d432d8aa44b75dca774 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py new file mode 100644 index 0000000..6b54630 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py @@ -0,0 +1,420 @@ +""" +WSGI Protocol Linter +==================== + +This module provides a middleware that performs sanity 
checks on the +behavior of the WSGI server and application. It checks that the +:pep:`3333` WSGI spec is properly implemented. It also warns on some +common HTTP errors such as non-empty responses for 304 status codes. + +.. autoclass:: LintMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import typing as t +from types import TracebackType +from urllib.parse import urlparse +from warnings import warn + +from ..datastructures import Headers +from ..http import is_entity_header +from ..wsgi import FileWrapper + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class WSGIWarning(Warning): + """Warning class for WSGI warnings.""" + + +class HTTPWarning(Warning): + """Warning class for HTTP warnings.""" + + +def check_type(context: str, obj: object, need: t.Type = str) -> None: + if type(obj) is not need: + warn( + f"{context!r} requires {need.__name__!r}, got {type(obj).__name__!r}.", + WSGIWarning, + stacklevel=3, + ) + + +class InputStream: + def __init__(self, stream: t.IO[bytes]) -> None: + self._stream = stream + + def read(self, *args: t.Any) -> bytes: + if len(args) == 0: + warn( + "WSGI does not guarantee an EOF marker on the input stream, thus making" + " calls to 'wsgi.input.read()' unsafe. Conforming servers may never" + " return from this call.", + WSGIWarning, + stacklevel=2, + ) + elif len(args) != 1: + warn( + "Too many parameters passed to 'wsgi.input.read()'.", + WSGIWarning, + stacklevel=2, + ) + return self._stream.read(*args) + + def readline(self, *args: t.Any) -> bytes: + if len(args) == 0: + warn( + "Calls to 'wsgi.input.readline()' without arguments are unsafe. Use" + " 'wsgi.input.read()' instead.", + WSGIWarning, + stacklevel=2, + ) + elif len(args) == 1: + warn( + "'wsgi.input.readline()' was called with a size hint. 
WSGI does not" + " support this, although it's available on all major servers.", + WSGIWarning, + stacklevel=2, + ) + else: + raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.") + return self._stream.readline(*args) + + def __iter__(self) -> t.Iterator[bytes]: + try: + return iter(self._stream) + except TypeError: + warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2) + return iter(()) + + def close(self) -> None: + warn("The application closed the input stream!", WSGIWarning, stacklevel=2) + self._stream.close() + + +class ErrorStream: + def __init__(self, stream: t.IO[str]) -> None: + self._stream = stream + + def write(self, s: str) -> None: + check_type("wsgi.error.write()", s, str) + self._stream.write(s) + + def flush(self) -> None: + self._stream.flush() + + def writelines(self, seq: t.Iterable[str]) -> None: + for line in seq: + self.write(line) + + def close(self) -> None: + warn("The application closed the error stream!", WSGIWarning, stacklevel=2) + self._stream.close() + + +class GuardedWrite: + def __init__(self, write: t.Callable[[bytes], object], chunks: t.List[int]) -> None: + self._write = write + self._chunks = chunks + + def __call__(self, s: bytes) -> None: + check_type("write()", s, bytes) + self._write(s) + self._chunks.append(len(s)) + + +class GuardedIterator: + def __init__( + self, + iterator: t.Iterable[bytes], + headers_set: t.Tuple[int, Headers], + chunks: t.List[int], + ) -> None: + self._iterator = iterator + self._next = iter(iterator).__next__ + self.closed = False + self.headers_set = headers_set + self.chunks = chunks + + def __iter__(self) -> "GuardedIterator": + return self + + def __next__(self) -> bytes: + if self.closed: + warn("Iterated over closed 'app_iter'.", WSGIWarning, stacklevel=2) + + rv = self._next() + + if not self.headers_set: + warn( + "The application returned before it started the response.", + WSGIWarning, + stacklevel=2, + ) + + check_type("application iterator items", rv, bytes) + self.chunks.append(len(rv)) + return rv + + def close(self) -> None: + self.closed = True + + if hasattr(self._iterator, "close"): + self._iterator.close() # type: ignore + + if self.headers_set: + status_code, headers = self.headers_set + bytes_sent = sum(self.chunks) + content_length = headers.get("content-length", type=int) + + if status_code == 304: + for key, _value in headers: + key = key.lower() + if key not in ("expires", "content-location") and is_entity_header( + key + ): + warn( + f"Entity header {key!r} found in 304 response.", HTTPWarning + ) + if bytes_sent: + warn("304 responses must not have a body.", HTTPWarning) + elif 100 <= status_code < 200 or status_code == 204: + if content_length != 0: + warn( + f"{status_code} responses must have an empty content length.", + HTTPWarning, + ) + if bytes_sent: + warn(f"{status_code} responses must not have a body.", HTTPWarning) + elif content_length is not None and content_length != bytes_sent: + warn( + "Content-Length and the number of bytes sent to the" + " client do not match.", + WSGIWarning, + ) + + def __del__(self) -> None: + if not self.closed: + try: + warn( + "Iterator was garbage collected before it was closed.", WSGIWarning + ) + except Exception: + pass + + +class LintMiddleware: + """Warns about common errors in the WSGI and HTTP behavior of the + server and wrapped application. 
Some of the issues it checks are: + + - invalid status codes + - non-bytes sent to the WSGI server + - strings returned from the WSGI application + - non-empty conditional responses + - unquoted etags + - relative URLs in the Location header + - unsafe calls to wsgi.input + - unclosed iterators + + Error information is emitted using the :mod:`warnings` module. + + :param app: The WSGI application to wrap. + + .. code-block:: python + + from werkzeug.middleware.lint import LintMiddleware + app = LintMiddleware(app) + """ + + def __init__(self, app: "WSGIApplication") -> None: + self.app = app + + def check_environ(self, environ: "WSGIEnvironment") -> None: + if type(environ) is not dict: + warn( + "WSGI environment is not a standard Python dict.", + WSGIWarning, + stacklevel=4, + ) + for key in ( + "REQUEST_METHOD", + "SERVER_NAME", + "SERVER_PORT", + "wsgi.version", + "wsgi.input", + "wsgi.errors", + "wsgi.multithread", + "wsgi.multiprocess", + "wsgi.run_once", + ): + if key not in environ: + warn( + f"Required environment key {key!r} not found", + WSGIWarning, + stacklevel=3, + ) + if environ["wsgi.version"] != (1, 0): + warn("Environ is not a WSGI 1.0 environ.", WSGIWarning, stacklevel=3) + + script_name = environ.get("SCRIPT_NAME", "") + path_info = environ.get("PATH_INFO", "") + + if script_name and script_name[0] != "/": + warn( + f"'SCRIPT_NAME' does not start with a slash: {script_name!r}", + WSGIWarning, + stacklevel=3, + ) + + if path_info and path_info[0] != "/": + warn( + f"'PATH_INFO' does not start with a slash: {path_info!r}", + WSGIWarning, + stacklevel=3, + ) + + def check_start_response( + self, + status: str, + headers: t.List[t.Tuple[str, str]], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ], + ) -> t.Tuple[int, Headers]: + check_type("status", status, str) + status_code_str = status.split(None, 1)[0] + + if len(status_code_str) != 3 or not status_code_str.isdecimal(): + warn("Status code must be three digits.", WSGIWarning, stacklevel=3) + + if len(status) < 4 or status[3] != " ": + warn( + f"Invalid value for status {status!r}. 
Valid status strings are three" + " digits, a space and a status explanation.", + WSGIWarning, + stacklevel=3, + ) + + status_code = int(status_code_str) + + if status_code < 100: + warn("Status code < 100 detected.", WSGIWarning, stacklevel=3) + + if type(headers) is not list: + warn("Header list is not a list.", WSGIWarning, stacklevel=3) + + for item in headers: + if type(item) is not tuple or len(item) != 2: + warn("Header items must be 2-item tuples.", WSGIWarning, stacklevel=3) + name, value = item + if type(name) is not str or type(value) is not str: + warn( + "Header keys and values must be strings.", WSGIWarning, stacklevel=3 + ) + if name.lower() == "status": + warn( + "The status header is not supported due to" + " conflicts with the CGI spec.", + WSGIWarning, + stacklevel=3, + ) + + if exc_info is not None and not isinstance(exc_info, tuple): + warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3) + + headers = Headers(headers) + self.check_headers(headers) + + return status_code, headers + + def check_headers(self, headers: Headers) -> None: + etag = headers.get("etag") + + if etag is not None: + if etag.startswith(("W/", "w/")): + if etag.startswith("w/"): + warn( + "Weak etag indicator should be upper case.", + HTTPWarning, + stacklevel=4, + ) + + etag = etag[2:] + + if not (etag[:1] == etag[-1:] == '"'): + warn("Unquoted etag emitted.", HTTPWarning, stacklevel=4) + + location = headers.get("location") + + if location is not None: + if not urlparse(location).netloc: + warn( + "Absolute URLs required for location header.", + HTTPWarning, + stacklevel=4, + ) + + def check_iterator(self, app_iter: t.Iterable[bytes]) -> None: + if isinstance(app_iter, bytes): + warn( + "The application returned a bytestring. The response will send one" + " character at a time to the client, which will kill performance." + " Return a list or iterable instead.", + WSGIWarning, + stacklevel=3, + ) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Iterable[bytes]: + if len(args) != 2: + warn("A WSGI app takes two arguments.", WSGIWarning, stacklevel=2) + + if kwargs: + warn( + "A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2 + ) + + environ: "WSGIEnvironment" = args[0] + start_response: "StartResponse" = args[1] + + self.check_environ(environ) + environ["wsgi.input"] = InputStream(environ["wsgi.input"]) + environ["wsgi.errors"] = ErrorStream(environ["wsgi.errors"]) + + # Hook our own file wrapper in so that applications will always + # iterate to the end and we can check the content length. 
+ environ["wsgi.file_wrapper"] = FileWrapper + + headers_set: t.List[t.Any] = [] + chunks: t.List[int] = [] + + def checking_start_response( + *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[bytes], None]: + if len(args) not in {2, 3}: + warn( + f"Invalid number of arguments: {len(args)}, expected 2 or 3.", + WSGIWarning, + stacklevel=2, + ) + + if kwargs: + warn("'start_response' does not take keyword arguments.", WSGIWarning) + + status: str = args[0] + headers: t.List[t.Tuple[str, str]] = args[1] + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = (args[2] if len(args) == 3 else None) + + headers_set[:] = self.check_start_response(status, headers, exc_info) + return GuardedWrite(start_response(status, headers, exc_info), chunks) + + app_iter = self.app(environ, t.cast("StartResponse", checking_start_response)) + self.check_iterator(app_iter) + return GuardedIterator( + app_iter, t.cast(t.Tuple[int, Headers], headers_set), chunks + ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py new file mode 120000 index 0000000..411bec5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/42/45/e4/edcaa768f9c5f3042efb94b23c220e4f7fe4740054068ad03a085534e0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py new file mode 120000 index 0000000..a10668b --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/97/b2/c2/fcb0eed187784af5314b94858a00093337083869ba2cd2a5f485c90525 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py new file mode 120000 index 0000000..7922512 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7d/78/eb/124baac5454b1b50cbace7502dcf7a4107637ed081675a0ce682bcf61e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/py.typed b/venv/lib/python3.8/site-packages/werkzeug/py.typed new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/py.typed @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/routing/__init__.py new file mode 100644 index 0000000..84b043f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing/__init__.py @@ -0,0 +1,133 @@ +"""When it comes to combining multiple controller or view functions +(however you want to call them) you need a dispatcher. A simple way +would be applying regular expression tests on the ``PATH_INFO`` and +calling registered callback functions that return the value then. + +This module implements a much more powerful system than simple regular +expression matching because it can also convert values in the URLs and +build URLs. + +Here a simple example that creates a URL map for an application with +two subdomains (www and kb) and some URL rules: + +.. 
code-block:: python
+
+    m = Map([
+        # Static URLs
+        Rule('/', endpoint='static/index'),
+        Rule('/about', endpoint='static/about'),
+        Rule('/help', endpoint='static/help'),
+        # Knowledge Base
+        Subdomain('kb', [
+            Rule('/', endpoint='kb/index'),
+            Rule('/browse/', endpoint='kb/browse'),
+            Rule('/browse/<int:id>/', endpoint='kb/browse'),
+            Rule('/browse/<int:id>/<int:page>', endpoint='kb/browse')
+        ])
+    ], default_subdomain='www')
+
+If the application doesn't use subdomains it's perfectly fine to not set
+the default subdomain and not use the `Subdomain` rule factory. The
+endpoint in the rules can be anything, for example import paths or
+unique identifiers. The WSGI application can use those endpoints to get the
+handler for that URL. It doesn't have to be a string at all but it's
+recommended.
+
+Now it's possible to create a URL adapter for one of the subdomains and
+build URLs:
+
+.. code-block:: python
+
+    c = m.bind('example.com')
+
+    c.build("kb/browse", dict(id=42))
+    'http://kb.example.com/browse/42/'
+
+    c.build("kb/browse", dict())
+    'http://kb.example.com/browse/'
+
+    c.build("kb/browse", dict(id=42, page=3))
+    'http://kb.example.com/browse/42/3'
+
+    c.build("static/about")
+    '/about'
+
+    c.build("static/index", force_external=True)
+    'http://www.example.com/'
+
+    c = m.bind('example.com', subdomain='kb')
+
+    c.build("static/about")
+    'http://www.example.com/about'
+
+The first argument to bind is the server name *without* the subdomain.
+Per default it will assume that the script is mounted on the root, but
+often that's not the case so you can provide the real mount point as
+second argument:
+
+.. code-block:: python
+
+    c = m.bind('example.com', '/applications/example')
+
+The third argument can be the subdomain, if not given the default
+subdomain is used. For more details about binding have a look at the
+documentation of the `MapAdapter`.
+
+And here is how you can match URLs:
+
+.. code-block:: python
+
+    c = m.bind('example.com')
+
+    c.match("/")
+    ('static/index', {})
+
+    c.match("/about")
+    ('static/about', {})
+
+    c = m.bind('example.com', '/', 'kb')
+
+    c.match("/")
+    ('kb/index', {})
+
+    c.match("/browse/42/23")
+    ('kb/browse', {'id': 42, 'page': 23})
+
+If matching fails you get a ``NotFound`` exception, if the rule thinks
+it's a good idea to redirect (for example because the URL was defined
+to have a slash at the end but the request was missing that slash) it
+will raise a ``RequestRedirect`` exception. Both are subclasses of
+``HTTPException`` so you can use those errors as responses in the
+application.
+
+If matching succeeded but the URL rule was incompatible to the given
+method (for example there were only rules for ``GET`` and ``HEAD`` but
+routing tried to match a ``POST`` request) a ``MethodNotAllowed``
+exception is raised.
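A minimal end-to-end sketch, assuming only the ``Map``, ``Rule`` and
converter syntax shown above (endpoint names are illustrative):

.. code-block:: python

    from werkzeug.routing import Map, Rule

    m = Map([
        Rule('/', endpoint='index'),
        Rule('/user/<int:id>', endpoint='user'),
    ])
    urls = m.bind('example.com')

    assert urls.match('/user/42') == ('user', {'id': 42})
    assert urls.build('user', {'id': 42}) == '/user/42'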
+""" +from .converters import AnyConverter as AnyConverter +from .converters import BaseConverter as BaseConverter +from .converters import FloatConverter as FloatConverter +from .converters import IntegerConverter as IntegerConverter +from .converters import PathConverter as PathConverter +from .converters import UnicodeConverter as UnicodeConverter +from .converters import UUIDConverter as UUIDConverter +from .converters import ValidationError as ValidationError +from .exceptions import BuildError as BuildError +from .exceptions import NoMatch as NoMatch +from .exceptions import RequestAliasRedirect as RequestAliasRedirect +from .exceptions import RequestPath as RequestPath +from .exceptions import RequestRedirect as RequestRedirect +from .exceptions import RoutingException as RoutingException +from .exceptions import WebsocketMismatch as WebsocketMismatch +from .map import Map as Map +from .map import MapAdapter as MapAdapter +from .matcher import StateMachineMatcher as StateMachineMatcher +from .rules import EndpointPrefix as EndpointPrefix +from .rules import parse_converter_args as parse_converter_args +from .rules import Rule as Rule +from .rules import RuleFactory as RuleFactory +from .rules import RuleTemplate as RuleTemplate +from .rules import RuleTemplateFactory as RuleTemplateFactory +from .rules import Subdomain as Subdomain +from .rules import Submount as Submount diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4d00f40 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/converters.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/converters.cpython-38.pyc new file mode 100644 index 0000000..289f0f0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/converters.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..90fc928 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/map.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/map.cpython-38.pyc new file mode 100644 index 0000000..7a72980 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/map.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/matcher.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/matcher.cpython-38.pyc new file mode 100644 index 0000000..3ab478b Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/matcher.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/rules.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/rules.cpython-38.pyc new file mode 100644 index 0000000..a64ed50 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/routing/__pycache__/rules.cpython-38.pyc differ diff --git 
a/venv/lib/python3.8/site-packages/werkzeug/routing/converters.py b/venv/lib/python3.8/site-packages/werkzeug/routing/converters.py new file mode 100644 index 0000000..bbad29d --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing/converters.py @@ -0,0 +1,257 @@
+import re
+import typing as t
+import uuid
+
+from ..urls import _fast_url_quote
+
+if t.TYPE_CHECKING:
+    from .map import Map
+
+
+class ValidationError(ValueError):
+    """Validation error. If a rule converter raises this exception the rule
+    does not match the current URL and the next URL is tried.
+    """
+
+
+class BaseConverter:
+    """Base class for all converters."""
+
+    regex = "[^/]+"
+    weight = 100
+    part_isolating = True
+
+    def __init__(self, map: "Map", *args: t.Any, **kwargs: t.Any) -> None:
+        self.map = map
+
+    def to_python(self, value: str) -> t.Any:
+        return value
+
+    def to_url(self, value: t.Any) -> str:
+        if isinstance(value, (bytes, bytearray)):
+            return _fast_url_quote(value)
+        return _fast_url_quote(str(value).encode(self.map.charset))
+
+
+class UnicodeConverter(BaseConverter):
+    """This converter is the default converter and accepts any string but
+    only one path segment. Thus the string can not include a slash.
+
+    This is the default validator.
+
+    Example::
+
+        Rule('/pages/<page>'),
+        Rule('/<string(length=2):lang_code>')
+
+    :param map: the :class:`Map`.
+    :param minlength: the minimum length of the string. Must be greater
+                      or equal 1.
+    :param maxlength: the maximum length of the string.
+    :param length: the exact length of the string.
+    """
+
+    part_isolating = True
+
+    def __init__(
+        self,
+        map: "Map",
+        minlength: int = 1,
+        maxlength: t.Optional[int] = None,
+        length: t.Optional[int] = None,
+    ) -> None:
+        super().__init__(map)
+        if length is not None:
+            length_regex = f"{{{int(length)}}}"
+        else:
+            if maxlength is None:
+                maxlength_value = ""
+            else:
+                maxlength_value = str(int(maxlength))
+            length_regex = f"{{{int(minlength)},{maxlength_value}}}"
+        self.regex = f"[^/]{length_regex}"
+
+
+class AnyConverter(BaseConverter):
+    """Matches one of the items provided. Items can either be Python
+    identifiers or strings::
+
+        Rule('/<any(about, help, imprint, class, "foo,bar"):page_name>')
+
+    :param map: the :class:`Map`.
+    :param items: this function accepts the possible items as positional
+                  arguments.
+
+    .. versionchanged:: 2.2
+        Value is validated when building a URL.
+    """
+
+    part_isolating = True
+
+    def __init__(self, map: "Map", *items: str) -> None:
+        super().__init__(map)
+        self.items = set(items)
+        self.regex = f"(?:{'|'.join([re.escape(x) for x in items])})"
+
+    def to_url(self, value: t.Any) -> str:
+        if value in self.items:
+            return str(value)
+
+        valid_values = ", ".join(f"'{item}'" for item in sorted(self.items))
+        raise ValueError(f"'{value}' is not one of {valid_values}")
+
+
+class PathConverter(BaseConverter):
+    """Like the default :class:`UnicodeConverter`, but it also matches
+    slashes. This is useful for wikis and similar applications::
+
+        Rule('/<path:wikipage>')
+        Rule('/<path:wikipage>/edit')
+
+    :param map: the :class:`Map`.
+    """
+
+    regex = "[^/].*?"
+    weight = 200
+    part_isolating = False
+
+
+class NumberConverter(BaseConverter):
+    """Baseclass for `IntegerConverter` and `FloatConverter`.
+
+    :internal:
+    """
+
+    weight = 50
+    num_convert: t.Callable = int
+    part_isolating = True
+
+    def __init__(
+        self,
+        map: "Map",
+        fixed_digits: int = 0,
+        min: t.Optional[int] = None,
+        max: t.Optional[int] = None,
+        signed: bool = False,
+    ) -> None:
+        if signed:
+            self.regex = self.signed_regex
+        super().__init__(map)
+        self.fixed_digits = fixed_digits
+        self.min = min
+        self.max = max
+        self.signed = signed
+
+    def to_python(self, value: str) -> t.Any:
+        if self.fixed_digits and len(value) != self.fixed_digits:
+            raise ValidationError()
+        value = self.num_convert(value)
+        if (self.min is not None and value < self.min) or (
+            self.max is not None and value > self.max
+        ):
+            raise ValidationError()
+        return value
+
+    def to_url(self, value: t.Any) -> str:
+        value = str(self.num_convert(value))
+        if self.fixed_digits:
+            value = value.zfill(self.fixed_digits)
+        return value
+
+    @property
+    def signed_regex(self) -> str:
+        return f"-?{self.regex}"
+
+
+class IntegerConverter(NumberConverter):
+    """This converter only accepts integer values::
+
+        Rule("/page/<int:page>")
+
+    By default it only accepts unsigned, positive values. The ``signed``
+    parameter will enable signed, negative values. ::
+
+        Rule("/page/<int(signed=True):page>")
+
+    :param map: The :class:`Map`.
+    :param fixed_digits: The number of fixed digits in the URL. If you
+        set this to ``4`` for example, the rule will only match if the
+        URL looks like ``/0001/``. The default is variable length.
+    :param min: The minimal value.
+    :param max: The maximal value.
+    :param signed: Allow signed (negative) values.
+
+    .. versionadded:: 0.15
+        The ``signed`` parameter.
+    """
+
+    regex = r"\d+"
+    part_isolating = True
+
+
+class FloatConverter(NumberConverter):
+    """This converter only accepts floating point values::
+
+        Rule("/probability/<float:probability>")
+
+    By default it only accepts unsigned, positive values. The ``signed``
+    parameter will enable signed, negative values. ::
+
+        Rule("/offset/<float(signed=True):offset>")
+
+    :param map: The :class:`Map`.
+    :param min: The minimal value.
+    :param max: The maximal value.
+    :param signed: Allow signed (negative) values.
+
+    .. versionadded:: 0.15
+        The ``signed`` parameter.
+    """
+
+    regex = r"\d+\.\d+"
+    num_convert = float
+    part_isolating = True
+
+    def __init__(
+        self,
+        map: "Map",
+        min: t.Optional[float] = None,
+        max: t.Optional[float] = None,
+        signed: bool = False,
+    ) -> None:
+        super().__init__(map, min=min, max=max, signed=signed)  # type: ignore
+
+
+class UUIDConverter(BaseConverter):
+    """This converter only accepts UUID strings::
+
+        Rule('/object/<uuid:identifier>')
+
+    .. versionadded:: 0.10
+
+    :param map: the :class:`Map`.
+    """
+
+    regex = (
+        r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-"
+        r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}"
+    )
+    part_isolating = True
+
+    def to_python(self, value: str) -> uuid.UUID:
+        return uuid.UUID(value)
+
+    def to_url(self, value: uuid.UUID) -> str:
+        return str(value)
+
+
+#: the default converter mapping for the map.
+DEFAULT_CONVERTERS: t.Mapping[str, t.Type[BaseConverter]] = { + "default": UnicodeConverter, + "string": UnicodeConverter, + "any": AnyConverter, + "path": PathConverter, + "int": IntegerConverter, + "float": FloatConverter, + "uuid": UUIDConverter, +} diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/exceptions.py b/venv/lib/python3.8/site-packages/werkzeug/routing/exceptions.py new file mode 100644 index 0000000..7cbe6e9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing/exceptions.py @@ -0,0 +1,146 @@ +import difflib +import typing as t + +from ..exceptions import BadRequest +from ..exceptions import HTTPException +from ..utils import cached_property +from ..utils import redirect + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + from .map import MapAdapter + from .rules import Rule # noqa: F401 + from ..wrappers.request import Request + from ..wrappers.response import Response + + +class RoutingException(Exception): + """Special exceptions that require the application to redirect, notifying + about missing urls, etc. + + :internal: + """ + + +class RequestRedirect(HTTPException, RoutingException): + """Raise if the map requests a redirect. This is for example the case if + `strict_slashes` are activated and an url that requires a trailing slash. + + The attribute `new_url` contains the absolute destination url. + """ + + code = 308 + + def __init__(self, new_url: str) -> None: + super().__init__(new_url) + self.new_url = new_url + + def get_response( + self, + environ: t.Optional[t.Union["WSGIEnvironment", "Request"]] = None, + scope: t.Optional[dict] = None, + ) -> "Response": + return redirect(self.new_url, self.code) + + +class RequestPath(RoutingException): + """Internal exception.""" + + __slots__ = ("path_info",) + + def __init__(self, path_info: str) -> None: + super().__init__() + self.path_info = path_info + + +class RequestAliasRedirect(RoutingException): # noqa: B903 + """This rule is an alias and wants to redirect to the canonical URL.""" + + def __init__(self, matched_values: t.Mapping[str, t.Any], endpoint: str) -> None: + super().__init__() + self.matched_values = matched_values + self.endpoint = endpoint + + +class BuildError(RoutingException, LookupError): + """Raised if the build system cannot find a URL for an endpoint with the + values provided. 
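A minimal sketch of catching it (the error message typically suggests the
closest known endpoint when the adapter is attached to the error; endpoint
names here are illustrative):

.. code-block:: python

    from werkzeug.routing import BuildError, Map, Rule

    m = Map([Rule('/profile', endpoint='user_profile')])
    urls = m.bind('example.com')

    try:
        urls.build('user_profil')   # typo in the endpoint name
    except BuildError as exc:
        print(exc)                  # may suggest 'user_profile' instead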
+ """ + + def __init__( + self, + endpoint: str, + values: t.Mapping[str, t.Any], + method: t.Optional[str], + adapter: t.Optional["MapAdapter"] = None, + ) -> None: + super().__init__(endpoint, values, method) + self.endpoint = endpoint + self.values = values + self.method = method + self.adapter = adapter + + @cached_property + def suggested(self) -> t.Optional["Rule"]: + return self.closest_rule(self.adapter) + + def closest_rule(self, adapter: t.Optional["MapAdapter"]) -> t.Optional["Rule"]: + def _score_rule(rule: "Rule") -> float: + return sum( + [ + 0.98 + * difflib.SequenceMatcher( + None, rule.endpoint, self.endpoint + ).ratio(), + 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), + 0.01 * bool(rule.methods and self.method in rule.methods), + ] + ) + + if adapter and adapter.map._rules: + return max(adapter.map._rules, key=_score_rule) + + return None + + def __str__(self) -> str: + message = [f"Could not build url for endpoint {self.endpoint!r}"] + if self.method: + message.append(f" ({self.method!r})") + if self.values: + message.append(f" with values {sorted(self.values)!r}") + message.append(".") + if self.suggested: + if self.endpoint == self.suggested.endpoint: + if ( + self.method + and self.suggested.methods is not None + and self.method not in self.suggested.methods + ): + message.append( + " Did you mean to use methods" + f" {sorted(self.suggested.methods)!r}?" + ) + missing_values = self.suggested.arguments.union( + set(self.suggested.defaults or ()) + ) - set(self.values.keys()) + if missing_values: + message.append( + f" Did you forget to specify values {sorted(missing_values)!r}?" + ) + else: + message.append(f" Did you mean {self.suggested.endpoint!r} instead?") + return "".join(message) + + +class WebsocketMismatch(BadRequest): + """The only matched rule is either a WebSocket and the request is + HTTP, or the rule is HTTP and the request is a WebSocket. 
+ """ + + +class NoMatch(Exception): + __slots__ = ("have_match_for", "websocket_mismatch") + + def __init__(self, have_match_for: t.Set[str], websocket_mismatch: bool) -> None: + self.have_match_for = have_match_for + self.websocket_mismatch = websocket_mismatch diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/map.py b/venv/lib/python3.8/site-packages/werkzeug/routing/map.py new file mode 100644 index 0000000..daf94b6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing/map.py @@ -0,0 +1,944 @@ +import posixpath +import typing as t +import warnings +from pprint import pformat +from threading import Lock + +from .._internal import _encode_idna +from .._internal import _get_environ +from .._internal import _to_str +from .._internal import _wsgi_decoding_dance +from ..datastructures import ImmutableDict +from ..datastructures import MultiDict +from ..exceptions import BadHost +from ..exceptions import HTTPException +from ..exceptions import MethodNotAllowed +from ..exceptions import NotFound +from ..urls import url_encode +from ..urls import url_join +from ..urls import url_quote +from ..wsgi import get_host +from .converters import DEFAULT_CONVERTERS +from .exceptions import BuildError +from .exceptions import NoMatch +from .exceptions import RequestAliasRedirect +from .exceptions import RequestPath +from .exceptions import RequestRedirect +from .exceptions import WebsocketMismatch +from .matcher import StateMachineMatcher +from .rules import _simple_rule_re +from .rules import Rule + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from .converters import BaseConverter + from .rules import RuleFactory + from ..wrappers.request import Request + + +class Map: + """The map class stores all the URL rules and some configuration + parameters. Some of the configuration values are only stored on the + `Map` instance since those affect all rules, others are just defaults + and can be overridden for each rule. Note that you have to specify all + arguments besides the `rules` as keyword arguments! + + :param rules: sequence of url rules for this map. + :param default_subdomain: The default subdomain for rules without a + subdomain defined. + :param charset: charset of the url. defaults to ``"utf-8"`` + :param strict_slashes: If a rule ends with a slash but the matched + URL does not, redirect to the URL with a trailing slash. + :param merge_slashes: Merge consecutive slashes when matching or + building URLs. Matches will redirect to the normalized URL. + Slashes in variable parts are not merged. + :param redirect_defaults: This will redirect to the default rule if it + wasn't visited that way. This helps creating + unique URLs. + :param converters: A dict of converters that adds additional converters + to the list of converters. If you redefine one + converter this will override the original one. + :param sort_parameters: If set to `True` the url parameters are sorted. + See `url_encode` for more details. + :param sort_key: The sort key function for `url_encode`. + :param encoding_errors: the error method to use for decoding + :param host_matching: if set to `True` it enables the host matching + feature and disables the subdomain one. If + enabled the `host` parameter to rules is used + instead of the `subdomain` one. + + .. versionchanged:: 1.0 + If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules + will match. + + .. versionchanged:: 1.0 + Added ``merge_slashes``. 
+ + .. versionchanged:: 0.7 + Added ``encoding_errors`` and ``host_matching``. + + .. versionchanged:: 0.5 + Added ``sort_parameters`` and ``sort_key``. + """ + + #: A dict of default converters to be used. + default_converters = ImmutableDict(DEFAULT_CONVERTERS) + + #: The type of lock to use when updating. + #: + #: .. versionadded:: 1.0 + lock_class = Lock + + def __init__( + self, + rules: t.Optional[t.Iterable["RuleFactory"]] = None, + default_subdomain: str = "", + charset: str = "utf-8", + strict_slashes: bool = True, + merge_slashes: bool = True, + redirect_defaults: bool = True, + converters: t.Optional[t.Mapping[str, t.Type["BaseConverter"]]] = None, + sort_parameters: bool = False, + sort_key: t.Optional[t.Callable[[t.Any], t.Any]] = None, + encoding_errors: str = "replace", + host_matching: bool = False, + ) -> None: + self._matcher = StateMachineMatcher(merge_slashes) + self._rules_by_endpoint: t.Dict[str, t.List[Rule]] = {} + self._remap = True + self._remap_lock = self.lock_class() + + self.default_subdomain = default_subdomain + self.charset = charset + self.encoding_errors = encoding_errors + self.strict_slashes = strict_slashes + self.merge_slashes = merge_slashes + self.redirect_defaults = redirect_defaults + self.host_matching = host_matching + + self.converters = self.default_converters.copy() + if converters: + self.converters.update(converters) + + self.sort_parameters = sort_parameters + self.sort_key = sort_key + + for rulefactory in rules or (): + self.add(rulefactory) + + def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: + """Iterate over all rules and check if the endpoint expects + the arguments provided. This is for example useful if you have + some URLs that expect a language code and others that do not and + you want to wrap the builder a bit so that the current language + code is automatically added if not provided but endpoints expect + it. + + :param endpoint: the endpoint to check. + :param arguments: this function accepts one or more arguments + as positional arguments. Each one of them is + checked. + """ + self.update() + arguments = set(arguments) + for rule in self._rules_by_endpoint[endpoint]: + if arguments.issubset(rule.arguments): + return True + return False + + @property + def _rules(self) -> t.List[Rule]: + return [rule for rules in self._rules_by_endpoint.values() for rule in rules] + + def iter_rules(self, endpoint: t.Optional[str] = None) -> t.Iterator[Rule]: + """Iterate over all rules or the rules of an endpoint. + + :param endpoint: if provided only the rules for that endpoint + are returned. + :return: an iterator + """ + self.update() + if endpoint is not None: + return iter(self._rules_by_endpoint[endpoint]) + return iter(self._rules) + + def add(self, rulefactory: "RuleFactory") -> None: + """Add a new rule or factory to the map and bind it. Requires that the + rule is not bound to another map. 
+ + :param rulefactory: a :class:`Rule` or :class:`RuleFactory` + """ + for rule in rulefactory.get_rules(self): + rule.bind(self) + if not rule.build_only: + self._matcher.add(rule) + self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule) + self._remap = True + + def bind( + self, + server_name: str, + script_name: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + url_scheme: str = "http", + default_method: str = "GET", + path_info: t.Optional[str] = None, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + ) -> "MapAdapter": + """Return a new :class:`MapAdapter` with the details specified to the + call. Note that `script_name` will default to ``'/'`` if not further + specified or `None`. The `server_name` at least is a requirement + because the HTTP RFC requires absolute URLs for redirects and so all + redirect exceptions raised by Werkzeug will contain the full canonical + URL. + + If no path_info is passed to :meth:`match` it will use the default path + info passed to bind. While this doesn't really make sense for + manual bind calls, it's useful if you bind a map to a WSGI + environment which already contains the path info. + + `subdomain` will default to the `default_subdomain` for this map if + no defined. If there is no `default_subdomain` you cannot use the + subdomain feature. + + .. versionchanged:: 1.0 + If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules + will match. + + .. versionchanged:: 0.15 + ``path_info`` defaults to ``'/'`` if ``None``. + + .. versionchanged:: 0.8 + ``query_args`` can be a string. + + .. versionchanged:: 0.7 + Added ``query_args``. + """ + server_name = server_name.lower() + if self.host_matching: + if subdomain is not None: + raise RuntimeError("host matching enabled and a subdomain was provided") + elif subdomain is None: + subdomain = self.default_subdomain + if script_name is None: + script_name = "/" + if path_info is None: + path_info = "/" + + try: + server_name = _encode_idna(server_name) # type: ignore + except UnicodeError as e: + raise BadHost() from e + + return MapAdapter( + self, + server_name, + script_name, + subdomain, + url_scheme, + path_info, + default_method, + query_args, + ) + + def bind_to_environ( + self, + environ: t.Union["WSGIEnvironment", "Request"], + server_name: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + ) -> "MapAdapter": + """Like :meth:`bind` but you can pass it an WSGI environment and it + will fetch the information from that dictionary. Note that because of + limitations in the protocol there is no way to get the current + subdomain and real `server_name` from the environment. If you don't + provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or + `HTTP_HOST` if provided) as used `server_name` with disabled subdomain + feature. + + If `subdomain` is `None` but an environment and a server name is + provided it will calculate the current subdomain automatically. + Example: `server_name` is ``'example.com'`` and the `SERVER_NAME` + in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated + subdomain will be ``'staging.dev'``. + + If the object passed as environ has an environ attribute, the value of + this attribute is used instead. This allows you to pass request + objects. Additionally `PATH_INFO` added as a default of the + :class:`MapAdapter` so that you don't have to pass the path info to + the match method. + + .. 
versionchanged:: 1.0.0 + If the passed server name specifies port 443, it will match + if the incoming scheme is ``https`` without a port. + + .. versionchanged:: 1.0.0 + A warning is shown when the passed server name does not + match the incoming WSGI server name. + + .. versionchanged:: 0.8 + This will no longer raise a ValueError when an unexpected server + name was passed. + + .. versionchanged:: 0.5 + previously this method accepted a bogus `calculate_subdomain` + parameter that did not have any effect. It was removed because + of that. + + :param environ: a WSGI environment. + :param server_name: an optional server name hint (see above). + :param subdomain: optionally the current subdomain (see above). + """ + env = _get_environ(environ) + wsgi_server_name = get_host(env).lower() + scheme = env["wsgi.url_scheme"] + upgrade = any( + v.strip() == "upgrade" + for v in env.get("HTTP_CONNECTION", "").lower().split(",") + ) + + if upgrade and env.get("HTTP_UPGRADE", "").lower() == "websocket": + scheme = "wss" if scheme == "https" else "ws" + + if server_name is None: + server_name = wsgi_server_name + else: + server_name = server_name.lower() + + # strip standard port to match get_host() + if scheme in {"http", "ws"} and server_name.endswith(":80"): + server_name = server_name[:-3] + elif scheme in {"https", "wss"} and server_name.endswith(":443"): + server_name = server_name[:-4] + + if subdomain is None and not self.host_matching: + cur_server_name = wsgi_server_name.split(".") + real_server_name = server_name.split(".") + offset = -len(real_server_name) + + if cur_server_name[offset:] != real_server_name: + # This can happen even with valid configs if the server was + # accessed directly by IP address under some situations. + # Instead of raising an exception like in Werkzeug 0.7 or + # earlier we go by an invalid subdomain which will result + # in a 404 error on matching. + warnings.warn( + f"Current server name {wsgi_server_name!r} doesn't match configured" + f" server name {server_name!r}", + stacklevel=2, + ) + subdomain = "" + else: + subdomain = ".".join(filter(None, cur_server_name[:offset])) + + def _get_wsgi_string(name: str) -> t.Optional[str]: + val = env.get(name) + if val is not None: + return _wsgi_decoding_dance(val, self.charset) + return None + + script_name = _get_wsgi_string("SCRIPT_NAME") + path_info = _get_wsgi_string("PATH_INFO") + query_args = _get_wsgi_string("QUERY_STRING") + return Map.bind( + self, + server_name, + script_name, + subdomain, + scheme, + env["REQUEST_METHOD"], + path_info, + query_args=query_args, + ) + + def update(self) -> None: + """Called before matching and building to keep the compiled rules + in the correct order after things changed. + """ + if not self._remap: + return + + with self._remap_lock: + if not self._remap: + return + + self._matcher.update() + for rules in self._rules_by_endpoint.values(): + rules.sort(key=lambda x: x.build_compare_key()) + self._remap = False + + def __repr__(self) -> str: + rules = self.iter_rules() + return f"{type(self).__name__}({pformat(list(rules))})" + + +class MapAdapter: + + """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does + the URL matching and building based on runtime information. 
+ """ + + def __init__( + self, + map: Map, + server_name: str, + script_name: str, + subdomain: t.Optional[str], + url_scheme: str, + path_info: str, + default_method: str, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + ): + self.map = map + self.server_name = _to_str(server_name) + script_name = _to_str(script_name) + if not script_name.endswith("/"): + script_name += "/" + self.script_name = script_name + self.subdomain = _to_str(subdomain) + self.url_scheme = _to_str(url_scheme) + self.path_info = _to_str(path_info) + self.default_method = _to_str(default_method) + self.query_args = query_args + self.websocket = self.url_scheme in {"ws", "wss"} + + def dispatch( + self, + view_func: t.Callable[[str, t.Mapping[str, t.Any]], "WSGIApplication"], + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + catch_http_exceptions: bool = False, + ) -> "WSGIApplication": + """Does the complete dispatching process. `view_func` is called with + the endpoint and a dict with the values for the view. It should + look up the view function, call it, and return a response object + or WSGI application. http exceptions are not caught by default + so that applications can display nicer error messages by just + catching them by hand. If you want to stick with the default + error messages you can pass it ``catch_http_exceptions=True`` and + it will catch the http exceptions. + + Here a small example for the dispatch usage:: + + from werkzeug.wrappers import Request, Response + from werkzeug.wsgi import responder + from werkzeug.routing import Map, Rule + + def on_index(request): + return Response('Hello from the index') + + url_map = Map([Rule('/', endpoint='index')]) + views = {'index': on_index} + + @responder + def application(environ, start_response): + request = Request(environ) + urls = url_map.bind_to_environ(environ) + return urls.dispatch(lambda e, v: views[e](request, **v), + catch_http_exceptions=True) + + Keep in mind that this method might return exception objects, too, so + use :class:`Response.force_type` to get a response object. + + :param view_func: a function that is called with the endpoint as + first argument and the value dict as second. Has + to dispatch to the actual view function with this + information. (see above) + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param catch_http_exceptions: set to `True` to catch any of the + werkzeug :class:`HTTPException`\\s. + """ + try: + try: + endpoint, args = self.match(path_info, method) + except RequestRedirect as e: + return e + return view_func(endpoint, args) + except HTTPException as e: + if catch_http_exceptions: + return e + raise + + @t.overload + def match( # type: ignore + self, + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + return_rule: "te.Literal[False]" = False, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + websocket: t.Optional[bool] = None, + ) -> t.Tuple[str, t.Mapping[str, t.Any]]: + ... + + @t.overload + def match( + self, + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + return_rule: "te.Literal[True]" = True, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + websocket: t.Optional[bool] = None, + ) -> t.Tuple[Rule, t.Mapping[str, t.Any]]: + ... 
+ + def match( + self, + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + return_rule: bool = False, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + websocket: t.Optional[bool] = None, + ) -> t.Tuple[t.Union[str, Rule], t.Mapping[str, t.Any]]: + """The usage is simple: you just pass the match method the current + path info as well as the method (which defaults to `GET`). The + following things can then happen: + + - you receive a `NotFound` exception that indicates that no URL is + matching. A `NotFound` exception is also a WSGI application you + can call to get a default page not found page (happens to be the + same object as `werkzeug.exceptions.NotFound`) + + - you receive a `MethodNotAllowed` exception that indicates that there + is a match for this URL but not for the current request method. + This is useful for RESTful applications. + + - you receive a `RequestRedirect` exception with a `new_url` + attribute. This exception is used to notify you about a request + Werkzeug requests from your WSGI application. This is for example the + case if you request ``/foo`` although the correct URL is ``/foo/`` + You can use the `RequestRedirect` instance as response-like object + similar to all other subclasses of `HTTPException`. + + - you receive a ``WebsocketMismatch`` exception if the only + match is a WebSocket rule but the bind is an HTTP request, or + if the match is an HTTP rule but the bind is a WebSocket + request. + + - you get a tuple in the form ``(endpoint, arguments)`` if there is + a match (unless `return_rule` is True, in which case you get a tuple + in the form ``(rule, arguments)``) + + If the path info is not passed to the match method the default path + info of the map is used (defaults to the root URL if not defined + explicitly). + + All of the exceptions raised are subclasses of `HTTPException` so they + can be used as WSGI responses. They will all render generic error or + redirect pages. + + Here is a small example for matching: + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.match("/", "GET") + ('index', {}) + >>> urls.match("/downloads/42") + ('downloads/show', {'id': 42}) + + And here is what happens on redirect and missing URLs: + + >>> urls.match("/downloads") + Traceback (most recent call last): + ... + RequestRedirect: http://example.com/downloads/ + >>> urls.match("/missing") + Traceback (most recent call last): + ... + NotFound: 404 Not Found + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param return_rule: return the rule that matched instead of just the + endpoint (defaults to `False`). + :param query_args: optional query arguments that are used for + automatic redirects as string or dictionary. It's + currently not possible to use the query arguments + for URL matching. + :param websocket: Match WebSocket instead of HTTP requests. A + websocket request has a ``ws`` or ``wss`` + :attr:`url_scheme`. This overrides that detection. + + .. versionadded:: 1.0 + Added ``websocket``. + + .. versionchanged:: 0.8 + ``query_args`` can be a string. + + .. versionadded:: 0.7 + Added ``query_args``. + + .. versionadded:: 0.6 + Added ``return_rule``. 
+ """ + self.map.update() + if path_info is None: + path_info = self.path_info + else: + path_info = _to_str(path_info, self.map.charset) + if query_args is None: + query_args = self.query_args or {} + method = (method or self.default_method).upper() + + if websocket is None: + websocket = self.websocket + + domain_part = self.server_name if self.map.host_matching else self.subdomain + path_part = f"/{path_info.lstrip('/')}" if path_info else "" + + try: + result = self.map._matcher.match(domain_part, path_part, method, websocket) + except RequestPath as e: + raise RequestRedirect( + self.make_redirect_url( + url_quote(e.path_info, self.map.charset, safe="/:|+"), + query_args, + ) + ) from None + except RequestAliasRedirect as e: + raise RequestRedirect( + self.make_alias_redirect_url( + f"{domain_part}|{path_part}", + e.endpoint, + e.matched_values, + method, + query_args, + ) + ) from None + except NoMatch as e: + if e.have_match_for: + raise MethodNotAllowed(valid_methods=list(e.have_match_for)) from None + + if e.websocket_mismatch: + raise WebsocketMismatch() from None + + raise NotFound() from None + else: + rule, rv = result + + if self.map.redirect_defaults: + redirect_url = self.get_default_redirect(rule, method, rv, query_args) + if redirect_url is not None: + raise RequestRedirect(redirect_url) + + if rule.redirect_to is not None: + if isinstance(rule.redirect_to, str): + + def _handle_match(match: t.Match[str]) -> str: + value = rv[match.group(1)] + return rule._converters[match.group(1)].to_url(value) + + redirect_url = _simple_rule_re.sub(_handle_match, rule.redirect_to) + else: + redirect_url = rule.redirect_to(self, **rv) + + if self.subdomain: + netloc = f"{self.subdomain}.{self.server_name}" + else: + netloc = self.server_name + + raise RequestRedirect( + url_join( + f"{self.url_scheme or 'http'}://{netloc}{self.script_name}", + redirect_url, + ) + ) + + if return_rule: + return rule, rv + else: + return rule.endpoint, rv + + def test( + self, path_info: t.Optional[str] = None, method: t.Optional[str] = None + ) -> bool: + """Test if a rule would match. Works like `match` but returns `True` + if the URL matches, or `False` if it does not exist. + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + """ + try: + self.match(path_info, method) + except RequestRedirect: + pass + except HTTPException: + return False + return True + + def allowed_methods(self, path_info: t.Optional[str] = None) -> t.Iterable[str]: + """Returns the valid methods that match for a given path. + + .. versionadded:: 0.7 + """ + try: + self.match(path_info, method="--") + except MethodNotAllowed as e: + return e.valid_methods # type: ignore + except HTTPException: + pass + return [] + + def get_host(self, domain_part: t.Optional[str]) -> str: + """Figures out the full host name for the given domain part. The + domain part is a subdomain in case host matching is disabled or + a full host name. 
+ """ + if self.map.host_matching: + if domain_part is None: + return self.server_name + return _to_str(domain_part, "ascii") + subdomain = domain_part + if subdomain is None: + subdomain = self.subdomain + else: + subdomain = _to_str(subdomain, "ascii") + + if subdomain: + return f"{subdomain}.{self.server_name}" + else: + return self.server_name + + def get_default_redirect( + self, + rule: Rule, + method: str, + values: t.MutableMapping[str, t.Any], + query_args: t.Union[t.Mapping[str, t.Any], str], + ) -> t.Optional[str]: + """A helper that returns the URL to redirect to if it finds one. + This is used for default redirecting only. + + :internal: + """ + assert self.map.redirect_defaults + for r in self.map._rules_by_endpoint[rule.endpoint]: + # every rule that comes after this one, including ourself + # has a lower priority for the defaults. We order the ones + # with the highest priority up for building. + if r is rule: + break + if r.provides_defaults_for(rule) and r.suitable_for(values, method): + values.update(r.defaults) # type: ignore + domain_part, path = r.build(values) # type: ignore + return self.make_redirect_url(path, query_args, domain_part=domain_part) + return None + + def encode_query_args(self, query_args: t.Union[t.Mapping[str, t.Any], str]) -> str: + if not isinstance(query_args, str): + return url_encode(query_args, self.map.charset) + return query_args + + def make_redirect_url( + self, + path_info: str, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + domain_part: t.Optional[str] = None, + ) -> str: + """Creates a redirect URL. + + :internal: + """ + if query_args: + suffix = f"?{self.encode_query_args(query_args)}" + else: + suffix = "" + + scheme = self.url_scheme or "http" + host = self.get_host(domain_part) + path = posixpath.join(self.script_name.strip("/"), path_info.lstrip("/")) + return f"{scheme}://{host}/{path}{suffix}" + + def make_alias_redirect_url( + self, + path: str, + endpoint: str, + values: t.Mapping[str, t.Any], + method: str, + query_args: t.Union[t.Mapping[str, t.Any], str], + ) -> str: + """Internally called to make an alias redirect URL.""" + url = self.build( + endpoint, values, method, append_unknown=False, force_external=True + ) + if query_args: + url += f"?{self.encode_query_args(query_args)}" + assert url != path, "detected invalid alias setting. No canonical URL found" + return url + + def _partial_build( + self, + endpoint: str, + values: t.Mapping[str, t.Any], + method: t.Optional[str], + append_unknown: bool, + ) -> t.Optional[t.Tuple[str, str, bool]]: + """Helper for :meth:`build`. Returns subdomain and path for the + rule that accepts this endpoint, values and method. + + :internal: + """ + # in case the method is none, try with the default method first + if method is None: + rv = self._partial_build( + endpoint, values, self.default_method, append_unknown + ) + if rv is not None: + return rv + + # Default method did not match or a specific method is passed. + # Check all for first match with matching host. If no matching + # host is found, go with first result. 
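+        # For example, with host matching enabled and the adapter bound to a
+        # hypothetical "api.example.com", a rule whose host part builds to
+        # "api.example.com" is preferred; otherwise the first suitable build
+        # result below is used.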
+ first_match = None + + for rule in self.map._rules_by_endpoint.get(endpoint, ()): + if rule.suitable_for(values, method): + build_rv = rule.build(values, append_unknown) + + if build_rv is not None: + rv = (build_rv[0], build_rv[1], rule.websocket) + if self.map.host_matching: + if rv[0] == self.server_name: + return rv + elif first_match is None: + first_match = rv + else: + return rv + + return first_match + + def build( + self, + endpoint: str, + values: t.Optional[t.Mapping[str, t.Any]] = None, + method: t.Optional[str] = None, + force_external: bool = False, + append_unknown: bool = True, + url_scheme: t.Optional[str] = None, + ) -> str: + """Building URLs works pretty much the other way round. Instead of + `match` you call `build` and pass it the endpoint and a dict of + arguments for the placeholders. + + The `build` function also accepts an argument called `force_external` + which, if you set it to `True` will force external URLs. Per default + external URLs (include the server name) will only be used if the + target URL is on a different subdomain. + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.build("index", {}) + '/' + >>> urls.build("downloads/show", {'id': 42}) + '/downloads/42' + >>> urls.build("downloads/show", {'id': 42}, force_external=True) + 'http://example.com/downloads/42' + + Because URLs cannot contain non ASCII data you will always get + bytes back. Non ASCII characters are urlencoded with the + charset defined on the map instance. + + Additional values are converted to strings and appended to the URL as + URL querystring parameters: + + >>> urls.build("index", {'q': 'My Searchstring'}) + '/?q=My+Searchstring' + + When processing those additional values, lists are furthermore + interpreted as multiple values (as per + :py:class:`werkzeug.datastructures.MultiDict`): + + >>> urls.build("index", {'q': ['a', 'b', 'c']}) + '/?q=a&q=b&q=c' + + Passing a ``MultiDict`` will also add multiple values: + + >>> urls.build("index", MultiDict((('p', 'z'), ('q', 'a'), ('q', 'b')))) + '/?p=z&q=a&q=b' + + If a rule does not exist when building a `BuildError` exception is + raised. + + The build method accepts an argument called `method` which allows you + to specify the method you want to have an URL built for if you have + different methods for the same endpoint specified. + + :param endpoint: the endpoint of the URL to build. + :param values: the values for the URL to build. Unhandled values are + appended to the URL as query parameters. + :param method: the HTTP method for the rule if there are different + URLs for different methods on the same endpoint. + :param force_external: enforce full canonical external URLs. If the URL + scheme is not provided, this will generate + a protocol-relative URL. + :param append_unknown: unknown parameters are appended to the generated + URL as query string argument. Disable this + if you want the builder to ignore those. + :param url_scheme: Scheme to use in place of the bound + :attr:`url_scheme`. + + .. versionchanged:: 2.0 + Added the ``url_scheme`` parameter. + + .. versionadded:: 0.6 + Added the ``append_unknown`` parameter. 
+ """ + self.map.update() + + if values: + if isinstance(values, MultiDict): + values = { + k: (v[0] if len(v) == 1 else v) + for k, v in dict.items(values) + if len(v) != 0 + } + else: # plain dict + values = {k: v for k, v in values.items() if v is not None} + else: + values = {} + + rv = self._partial_build(endpoint, values, method, append_unknown) + if rv is None: + raise BuildError(endpoint, values, method, self) + + domain_part, path, websocket = rv + host = self.get_host(domain_part) + + if url_scheme is None: + url_scheme = self.url_scheme + + # Always build WebSocket routes with the scheme (browsers + # require full URLs). If bound to a WebSocket, ensure that HTTP + # routes are built with an HTTP scheme. + secure = url_scheme in {"https", "wss"} + + if websocket: + force_external = True + url_scheme = "wss" if secure else "ws" + elif url_scheme: + url_scheme = "https" if secure else "http" + + # shortcut this. + if not force_external and ( + (self.map.host_matching and host == self.server_name) + or (not self.map.host_matching and domain_part == self.subdomain) + ): + return f"{self.script_name.rstrip('/')}/{path.lstrip('/')}" + + scheme = f"{url_scheme}:" if url_scheme else "" + return f"{scheme}//{host}{self.script_name[:-1]}/{path.lstrip('/')}" diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/matcher.py b/venv/lib/python3.8/site-packages/werkzeug/routing/matcher.py new file mode 100644 index 0000000..d22b05a --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing/matcher.py @@ -0,0 +1,185 @@ +import re +import typing as t +from dataclasses import dataclass +from dataclasses import field + +from .converters import ValidationError +from .exceptions import NoMatch +from .exceptions import RequestAliasRedirect +from .exceptions import RequestPath +from .rules import Rule +from .rules import RulePart + + +class SlashRequired(Exception): + pass + + +@dataclass +class State: + """A representation of a rule state. + + This includes the *rules* that correspond to the state and the + possible *static* and *dynamic* transitions to the next state. 
+ """ + + dynamic: t.List[t.Tuple[RulePart, "State"]] = field(default_factory=list) + rules: t.List[Rule] = field(default_factory=list) + static: t.Dict[str, "State"] = field(default_factory=dict) + + +class StateMachineMatcher: + def __init__(self, merge_slashes: bool) -> None: + self._root = State() + self.merge_slashes = merge_slashes + + def add(self, rule: Rule) -> None: + state = self._root + for part in rule._parts: + if part.static: + state.static.setdefault(part.content, State()) + state = state.static[part.content] + else: + for test_part, new_state in state.dynamic: + if test_part == part: + state = new_state + break + else: + new_state = State() + state.dynamic.append((part, new_state)) + state = new_state + state.rules.append(rule) + + def update(self) -> None: + # For every state the dynamic transitions should be sorted by + # the weight of the transition + state = self._root + + def _update_state(state: State) -> None: + state.dynamic.sort(key=lambda entry: entry[0].weight) + for new_state in state.static.values(): + _update_state(new_state) + for _, new_state in state.dynamic: + _update_state(new_state) + + _update_state(state) + + def match( + self, domain: str, path: str, method: str, websocket: bool + ) -> t.Tuple[Rule, t.MutableMapping[str, t.Any]]: + # To match to a rule we need to start at the root state and + # try to follow the transitions until we find a match, or find + # there is no transition to follow. + + have_match_for = set() + websocket_mismatch = False + + def _match( + state: State, parts: t.List[str], values: t.List[str] + ) -> t.Optional[t.Tuple[Rule, t.List[str]]]: + # This function is meant to be called recursively, and will attempt + # to match the head part to the state's transitions. + nonlocal have_match_for, websocket_mismatch + + # The base case is when all parts have been matched via + # transitions. Hence if there is a rule with methods & + # websocket that work return it and the dynamic values + # extracted. + if parts == []: + for rule in state.rules: + if rule.methods is not None and method not in rule.methods: + have_match_for.update(rule.methods) + elif rule.websocket != websocket: + websocket_mismatch = True + else: + return rule, values + + # Test if there is a match with this path with a + # trailing slash, if so raise an exception to report + # that matching is possible with an additional slash + if "" in state.static: + for rule in state.static[""].rules: + if websocket == rule.websocket and ( + rule.methods is None or method in rule.methods + ): + if rule.strict_slashes: + raise SlashRequired() + else: + return rule, values + return None + + part = parts[0] + # To match this part try the static transitions first + if part in state.static: + rv = _match(state.static[part], parts[1:], values) + if rv is not None: + return rv + # No match via the static transitions, so try the dynamic + # ones. + for test_part, new_state in state.dynamic: + target = part + remaining = parts[1:] + # A final part indicates a transition that always + # consumes the remaining parts i.e. transitions to a + # final state. + if test_part.final: + target = "/".join(parts) + remaining = [] + match = re.compile(test_part.content).match(target) + if match is not None: + rv = _match(new_state, remaining, values + list(match.groups())) + if rv is not None: + return rv + + # If there is no match and the only part left is a + # trailing slash ("") consider rules that aren't + # strict-slashes as these should match if there is a final + # slash part. 
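+            # (For example, a request for "/about/" can still match a
+            # hypothetical Rule("/about", strict_slashes=False) at this point.)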
+ if parts == [""]: + for rule in state.rules: + if rule.strict_slashes: + continue + if rule.methods is not None and method not in rule.methods: + have_match_for.update(rule.methods) + elif rule.websocket != websocket: + websocket_mismatch = True + else: + return rule, values + + return None + + try: + rv = _match(self._root, [domain, *path.split("/")], []) + except SlashRequired: + raise RequestPath(f"{path}/") from None + + if self.merge_slashes and rv is None: + # Try to match again, but with slashes merged + path = re.sub("/{2,}?", "/", path) + try: + rv = _match(self._root, [domain, *path.split("/")], []) + except SlashRequired: + raise RequestPath(f"{path}/") from None + if rv is None: + raise NoMatch(have_match_for, websocket_mismatch) + else: + raise RequestPath(f"{path}") + elif rv is not None: + rule, values = rv + + result = {} + for name, value in zip(rule._converters.keys(), values): + try: + value = rule._converters[name].to_python(value) + except ValidationError: + raise NoMatch(have_match_for, websocket_mismatch) from None + result[str(name)] = value + if rule.defaults: + result.update(rule.defaults) + + if rule.alias and rule.map.redirect_defaults: + raise RequestAliasRedirect(result, rule.endpoint) + + return rule, result + + raise NoMatch(have_match_for, websocket_mismatch) diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing/rules.py b/venv/lib/python3.8/site-packages/werkzeug/routing/rules.py new file mode 100644 index 0000000..a61717a --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing/rules.py @@ -0,0 +1,879 @@ +import ast +import re +import typing as t +from dataclasses import dataclass +from string import Template +from types import CodeType + +from .._internal import _to_bytes +from ..urls import url_encode +from ..urls import url_quote +from .converters import ValidationError + +if t.TYPE_CHECKING: + from .converters import BaseConverter + from .map import Map + + +class Weighting(t.NamedTuple): + number_static_weights: int + static_weights: t.List[t.Tuple[int, int]] + number_argument_weights: int + argument_weights: t.List[int] + + +@dataclass +class RulePart: + """A part of a rule. + + Rules can be represented by parts as delimited by `/` with + instances of this class representing those parts. The *content* is + either the raw content if *static* or a regex string to match + against. The *weight* can be used to order parts when matching. + + """ + + content: str + final: bool + static: bool + weight: Weighting + + +_part_re = re.compile( + r""" + (?: + (?P\/) # a slash + | + (?P[^<\/]+) # static rule data + | + (?: + < + (?: + (?P[a-zA-Z_][a-zA-Z0-9_]*) # converter name + (?:\((?P.*?)\))? # converter arguments + \: # variable delimiter + )? + (?P[a-zA-Z_][a-zA-Z0-9_]*) # variable name + > + ) + ) + """, + re.VERBOSE, +) + +_simple_rule_re = re.compile(r"<([^>]+)>") +_converter_args_re = re.compile( + r""" + ((?P\w+)\s*=\s*)? + (?P + True|False| + \d+.\d+| + \d+.| + \d+| + [\w\d_.]+| + [urUR]?(?P"[^"]*?"|'[^']*') + )\s*, + """, + re.VERBOSE, +) + + +_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False} + + +def _find(value: str, target: str, pos: int) -> int: + """Find the *target* in *value* after *pos*. + + Returns the *value* length if *target* isn't found. 
+ """ + try: + return value.index(target, pos) + except ValueError: + return len(value) + + +def _pythonize(value: str) -> t.Union[None, bool, int, float, str]: + if value in _PYTHON_CONSTANTS: + return _PYTHON_CONSTANTS[value] + for convert in int, float: + try: + return convert(value) # type: ignore + except ValueError: + pass + if value[:1] == value[-1:] and value[0] in "\"'": + value = value[1:-1] + return str(value) + + +def parse_converter_args(argstr: str) -> t.Tuple[t.Tuple, t.Dict[str, t.Any]]: + argstr += "," + args = [] + kwargs = {} + + for item in _converter_args_re.finditer(argstr): + value = item.group("stringval") + if value is None: + value = item.group("value") + value = _pythonize(value) + if not item.group("name"): + args.append(value) + else: + name = item.group("name") + kwargs[name] = value + + return tuple(args), kwargs + + +class RuleFactory: + """As soon as you have more complex URL setups it's a good idea to use rule + factories to avoid repetitive tasks. Some of them are builtin, others can + be added by subclassing `RuleFactory` and overriding `get_rules`. + """ + + def get_rules(self, map: "Map") -> t.Iterable["Rule"]: + """Subclasses of `RuleFactory` have to override this method and return + an iterable of rules.""" + raise NotImplementedError() + + +class Subdomain(RuleFactory): + """All URLs provided by this factory have the subdomain set to a + specific domain. For example if you want to use the subdomain for + the current language this can be a good setup:: + + url_map = Map([ + Rule('/', endpoint='#select_language'), + Subdomain('', [ + Rule('/', endpoint='index'), + Rule('/about', endpoint='about'), + Rule('/help', endpoint='help') + ]) + ]) + + All the rules except for the ``'#select_language'`` endpoint will now + listen on a two letter long subdomain that holds the language code + for the current request. + """ + + def __init__(self, subdomain: str, rules: t.Iterable[RuleFactory]) -> None: + self.subdomain = subdomain + self.rules = rules + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.subdomain = self.subdomain + yield rule + + +class Submount(RuleFactory): + """Like `Subdomain` but prefixes the URL rule with a given string:: + + url_map = Map([ + Rule('/', endpoint='index'), + Submount('/blog', [ + Rule('/', endpoint='blog/index'), + Rule('/entry/', endpoint='blog/show') + ]) + ]) + + Now the rule ``'blog/show'`` matches ``/blog/entry/``. + """ + + def __init__(self, path: str, rules: t.Iterable[RuleFactory]) -> None: + self.path = path.rstrip("/") + self.rules = rules + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.rule = self.path + rule.rule + yield rule + + +class EndpointPrefix(RuleFactory): + """Prefixes all endpoints (which must be strings for this factory) with + another string. 
This can be useful for sub applications:: + + url_map = Map([ + Rule('/', endpoint='index'), + EndpointPrefix('blog/', [Submount('/blog', [ + Rule('/', endpoint='index'), + Rule('/entry/', endpoint='show') + ])]) + ]) + """ + + def __init__(self, prefix: str, rules: t.Iterable[RuleFactory]) -> None: + self.prefix = prefix + self.rules = rules + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.endpoint = self.prefix + rule.endpoint + yield rule + + +class RuleTemplate: + """Returns copies of the rules wrapped and expands string templates in + the endpoint, rule, defaults or subdomain sections. + + Here a small example for such a rule template:: + + from werkzeug.routing import Map, Rule, RuleTemplate + + resource = RuleTemplate([ + Rule('/$name/', endpoint='$name.list'), + Rule('/$name/', endpoint='$name.show') + ]) + + url_map = Map([resource(name='user'), resource(name='page')]) + + When a rule template is called the keyword arguments are used to + replace the placeholders in all the string parameters. + """ + + def __init__(self, rules: t.Iterable["Rule"]) -> None: + self.rules = list(rules) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> "RuleTemplateFactory": + return RuleTemplateFactory(self.rules, dict(*args, **kwargs)) + + +class RuleTemplateFactory(RuleFactory): + """A factory that fills in template variables into rules. Used by + `RuleTemplate` internally. + + :internal: + """ + + def __init__( + self, rules: t.Iterable[RuleFactory], context: t.Dict[str, t.Any] + ) -> None: + self.rules = rules + self.context = context + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + new_defaults = subdomain = None + if rule.defaults: + new_defaults = {} + for key, value in rule.defaults.items(): + if isinstance(value, str): + value = Template(value).substitute(self.context) + new_defaults[key] = value + if rule.subdomain is not None: + subdomain = Template(rule.subdomain).substitute(self.context) + new_endpoint = rule.endpoint + if isinstance(new_endpoint, str): + new_endpoint = Template(new_endpoint).substitute(self.context) + yield Rule( + Template(rule.rule).substitute(self.context), + new_defaults, + subdomain, + rule.methods, + rule.build_only, + new_endpoint, + rule.strict_slashes, + ) + + +def _prefix_names(src: str) -> ast.stmt: + """ast parse and prefix names with `.` to avoid collision with user vars""" + tree = ast.parse(src).body[0] + if isinstance(tree, ast.Expr): + tree = tree.value # type: ignore + for node in ast.walk(tree): + if isinstance(node, ast.Name): + node.id = f".{node.id}" + return tree + + +_CALL_CONVERTER_CODE_FMT = "self._converters[{elem!r}].to_url()" +_IF_KWARGS_URL_ENCODE_CODE = """\ +if kwargs: + params = self._encode_query_vars(kwargs) + q = "?" if params else "" +else: + q = params = "" +""" +_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE) +_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params")) + + +class Rule(RuleFactory): + """A Rule represents one URL pattern. There are some options for `Rule` + that change the way it behaves and are passed to the `Rule` constructor. + Note that besides the rule-string all arguments *must* be keyword arguments + in order to not break the application on Werkzeug upgrades. 
+ + `string` + Rule strings basically are just normal URL paths with placeholders in + the format ```` where the converter and the + arguments are optional. If no converter is defined the `default` + converter is used which means `string` in the normal configuration. + + URL rules that end with a slash are branch URLs, others are leaves. + If you have `strict_slashes` enabled (which is the default), all + branch URLs that are matched without a trailing slash will trigger a + redirect to the same URL with the missing slash appended. + + The converters are defined on the `Map`. + + `endpoint` + The endpoint for this rule. This can be anything. A reference to a + function, a string, a number etc. The preferred way is using a string + because the endpoint is used for URL generation. + + `defaults` + An optional dict with defaults for other rules with the same endpoint. + This is a bit tricky but useful if you want to have unique URLs:: + + url_map = Map([ + Rule('/all/', defaults={'page': 1}, endpoint='all_entries'), + Rule('/all/page/', endpoint='all_entries') + ]) + + If a user now visits ``http://example.com/all/page/1`` they will be + redirected to ``http://example.com/all/``. If `redirect_defaults` is + disabled on the `Map` instance this will only affect the URL + generation. + + `subdomain` + The subdomain rule string for this rule. If not specified the rule + only matches for the `default_subdomain` of the map. If the map is + not bound to a subdomain this feature is disabled. + + Can be useful if you want to have user profiles on different subdomains + and all subdomains are forwarded to your application:: + + url_map = Map([ + Rule('/', subdomain='', endpoint='user/homepage'), + Rule('/stats', subdomain='', endpoint='user/stats') + ]) + + `methods` + A sequence of http methods this rule applies to. If not specified, all + methods are allowed. For example this can be useful if you want different + endpoints for `POST` and `GET`. If methods are defined and the path + matches but the method matched against is not in this list or in the + list of another rule for that path the error raised is of the type + `MethodNotAllowed` rather than `NotFound`. If `GET` is present in the + list of methods and `HEAD` is not, `HEAD` is added automatically. + + `strict_slashes` + Override the `Map` setting for `strict_slashes` only for this rule. If + not specified the `Map` setting is used. + + `merge_slashes` + Override :attr:`Map.merge_slashes` for this rule. + + `build_only` + Set this to True and the rule will never match but will create a URL + that can be build. This is useful if you have resources on a subdomain + or folder that are not handled by the WSGI application (like static data) + + `redirect_to` + If given this must be either a string or callable. In case of a + callable it's called with the url adapter that triggered the match and + the values of the URL as keyword arguments and has to return the target + for the redirect, otherwise it has to be a string with placeholders in + rule syntax:: + + def foo_with_slug(adapter, id): + # ask the database for the slug for the old id. this of + # course has nothing to do with werkzeug. + return f'foo/{Foo.get_slug_for_id(id)}' + + url_map = Map([ + Rule('/foo/', endpoint='foo'), + Rule('/some/old/url/', redirect_to='foo/'), + Rule('/other/old/url/', redirect_to=foo_with_slug) + ]) + + When the rule is matched the routing system will raise a + `RequestRedirect` exception with the target for the redirect. 
+ + Keep in mind that the URL will be joined against the URL root of the + script so don't use a leading slash on the target URL unless you + really mean root of that domain. + + `alias` + If enabled this rule serves as an alias for another rule with the same + endpoint and arguments. + + `host` + If provided and the URL map has host matching enabled this can be + used to provide a match rule for the whole host. This also means + that the subdomain feature is disabled. + + `websocket` + If ``True``, this rule is only matches for WebSocket (``ws://``, + ``wss://``) requests. By default, rules will only match for HTTP + requests. + + .. versionchanged:: 2.1 + Percent-encoded newlines (``%0a``), which are decoded by WSGI + servers, are considered when routing instead of terminating the + match early. + + .. versionadded:: 1.0 + Added ``websocket``. + + .. versionadded:: 1.0 + Added ``merge_slashes``. + + .. versionadded:: 0.7 + Added ``alias`` and ``host``. + + .. versionchanged:: 0.6.1 + ``HEAD`` is added to ``methods`` if ``GET`` is present. + """ + + def __init__( + self, + string: str, + defaults: t.Optional[t.Mapping[str, t.Any]] = None, + subdomain: t.Optional[str] = None, + methods: t.Optional[t.Iterable[str]] = None, + build_only: bool = False, + endpoint: t.Optional[str] = None, + strict_slashes: t.Optional[bool] = None, + merge_slashes: t.Optional[bool] = None, + redirect_to: t.Optional[t.Union[str, t.Callable[..., str]]] = None, + alias: bool = False, + host: t.Optional[str] = None, + websocket: bool = False, + ) -> None: + if not string.startswith("/"): + raise ValueError("urls must start with a leading slash") + self.rule = string + self.is_leaf = not string.endswith("/") + self.is_branch = string.endswith("/") + + self.map: "Map" = None # type: ignore + self.strict_slashes = strict_slashes + self.merge_slashes = merge_slashes + self.subdomain = subdomain + self.host = host + self.defaults = defaults + self.build_only = build_only + self.alias = alias + self.websocket = websocket + + if methods is not None: + if isinstance(methods, str): + raise TypeError("'methods' should be a list of strings.") + + methods = {x.upper() for x in methods} + + if "HEAD" not in methods and "GET" in methods: + methods.add("HEAD") + + if websocket and methods - {"GET", "HEAD", "OPTIONS"}: + raise ValueError( + "WebSocket rules can only use 'GET', 'HEAD', and 'OPTIONS' methods." + ) + + self.methods = methods + self.endpoint: str = endpoint # type: ignore + self.redirect_to = redirect_to + + if defaults: + self.arguments = set(map(str, defaults)) + else: + self.arguments = set() + + self._converters: t.Dict[str, "BaseConverter"] = {} + self._trace: t.List[t.Tuple[bool, str]] = [] + self._parts: t.List[RulePart] = [] + + def empty(self) -> "Rule": + """ + Return an unbound copy of this rule. + + This can be useful if want to reuse an already bound URL for another + map. See ``get_empty_kwargs`` to override what keyword arguments are + provided to the new copy. + """ + return type(self)(self.rule, **self.get_empty_kwargs()) + + def get_empty_kwargs(self) -> t.Mapping[str, t.Any]: + """ + Provides kwargs for instantiating empty copy with empty() + + Use this method to provide custom keyword arguments to the subclass of + ``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass + has custom keyword arguments that are needed at instantiation. 
+ + Must return a ``dict`` that will be provided as kwargs to the new + instance of ``Rule``, following the initial ``self.rule`` value which + is always provided as the first, required positional argument. + """ + defaults = None + if self.defaults: + defaults = dict(self.defaults) + return dict( + defaults=defaults, + subdomain=self.subdomain, + methods=self.methods, + build_only=self.build_only, + endpoint=self.endpoint, + strict_slashes=self.strict_slashes, + redirect_to=self.redirect_to, + alias=self.alias, + host=self.host, + ) + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + yield self + + def refresh(self) -> None: + """Rebinds and refreshes the URL. Call this if you modified the + rule in place. + + :internal: + """ + self.bind(self.map, rebind=True) + + def bind(self, map: "Map", rebind: bool = False) -> None: + """Bind the url to a map and create a regular expression based on + the information from the rule itself and the defaults from the map. + + :internal: + """ + if self.map is not None and not rebind: + raise RuntimeError(f"url rule {self!r} already bound to map {self.map!r}") + self.map = map + if self.strict_slashes is None: + self.strict_slashes = map.strict_slashes + if self.merge_slashes is None: + self.merge_slashes = map.merge_slashes + if self.subdomain is None: + self.subdomain = map.default_subdomain + self.compile() + + def get_converter( + self, + variable_name: str, + converter_name: str, + args: t.Tuple, + kwargs: t.Mapping[str, t.Any], + ) -> "BaseConverter": + """Looks up the converter for the given parameter. + + .. versionadded:: 0.9 + """ + if converter_name not in self.map.converters: + raise LookupError(f"the converter {converter_name!r} does not exist") + return self.map.converters[converter_name](self.map, *args, **kwargs) + + def _encode_query_vars(self, query_vars: t.Mapping[str, t.Any]) -> str: + return url_encode( + query_vars, + charset=self.map.charset, + sort=self.map.sort_parameters, + key=self.map.sort_key, + ) + + def _parse_rule(self, rule: str) -> t.Iterable[RulePart]: + content = "" + static = True + argument_weights = [] + static_weights: t.List[t.Tuple[int, int]] = [] + final = False + + pos = 0 + while pos < len(rule): + match = _part_re.match(rule, pos) + if match is None: + raise ValueError(f"malformed url rule: {rule!r}") + + data = match.groupdict() + if data["static"] is not None: + static_weights.append((len(static_weights), -len(data["static"]))) + self._trace.append((False, data["static"])) + content += data["static"] if static else re.escape(data["static"]) + + if data["variable"] is not None: + if static: + # Switching content to represent regex, hence the need to escape + content = re.escape(content) + static = False + c_args, c_kwargs = parse_converter_args(data["arguments"] or "") + convobj = self.get_converter( + data["variable"], data["converter"] or "default", c_args, c_kwargs + ) + self._converters[data["variable"]] = convobj + self.arguments.add(data["variable"]) + if not convobj.part_isolating: + final = True + content += f"({convobj.regex})" + argument_weights.append(convobj.weight) + self._trace.append((True, data["variable"])) + + if data["slash"] is not None: + self._trace.append((False, "/")) + if final: + content += "/" + else: + if not static: + content += r"\Z" + weight = Weighting( + -len(static_weights), + static_weights, + -len(argument_weights), + argument_weights, + ) + yield RulePart( + content=content, final=final, static=static, weight=weight + ) + content = "" + static = True + 
argument_weights = [] + static_weights = [] + final = False + + pos = match.end() + + if not static: + content += r"\Z" + weight = Weighting( + -len(static_weights), + static_weights, + -len(argument_weights), + argument_weights, + ) + yield RulePart(content=content, final=final, static=static, weight=weight) + + def compile(self) -> None: + """Compiles the regular expression and stores it.""" + assert self.map is not None, "rule not bound" + + if self.map.host_matching: + domain_rule = self.host or "" + else: + domain_rule = self.subdomain or "" + self._parts = [] + self._trace = [] + self._converters = {} + if domain_rule == "": + self._parts = [ + RulePart( + content="", final=False, static=True, weight=Weighting(0, [], 0, []) + ) + ] + else: + self._parts.extend(self._parse_rule(domain_rule)) + self._trace.append((False, "|")) + rule = self.rule + if self.merge_slashes: + rule = re.sub("/{2,}?", "/", self.rule) + self._parts.extend(self._parse_rule(rule)) + + self._build: t.Callable[..., t.Tuple[str, str]] + self._build = self._compile_builder(False).__get__(self, None) + self._build_unknown: t.Callable[..., t.Tuple[str, str]] + self._build_unknown = self._compile_builder(True).__get__(self, None) + + @staticmethod + def _get_func_code(code: CodeType, name: str) -> t.Callable[..., t.Tuple[str, str]]: + globs: t.Dict[str, t.Any] = {} + locs: t.Dict[str, t.Any] = {} + exec(code, globs, locs) + return locs[name] # type: ignore + + def _compile_builder( + self, append_unknown: bool = True + ) -> t.Callable[..., t.Tuple[str, str]]: + defaults = self.defaults or {} + dom_ops: t.List[t.Tuple[bool, str]] = [] + url_ops: t.List[t.Tuple[bool, str]] = [] + + opl = dom_ops + for is_dynamic, data in self._trace: + if data == "|" and opl is dom_ops: + opl = url_ops + continue + # this seems like a silly case to ever come up but: + # if a default is given for a value that appears in the rule, + # resolve it to a constant ahead of time + if is_dynamic and data in defaults: + data = self._converters[data].to_url(defaults[data]) + opl.append((False, data)) + elif not is_dynamic: + opl.append( + (False, url_quote(_to_bytes(data, self.map.charset), safe="/:|+")) + ) + else: + opl.append((True, data)) + + def _convert(elem: str) -> ast.stmt: + ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem)) + ret.args = [ast.Name(str(elem), ast.Load())] # type: ignore # str for py2 + return ret + + def _parts(ops: t.List[t.Tuple[bool, str]]) -> t.List[ast.AST]: + parts = [ + _convert(elem) if is_dynamic else ast.Str(s=elem) + for is_dynamic, elem in ops + ] + parts = parts or [ast.Str("")] + # constant fold + ret = [parts[0]] + for p in parts[1:]: + if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str): + ret[-1] = ast.Str(ret[-1].s + p.s) + else: + ret.append(p) + return ret + + dom_parts = _parts(dom_ops) + url_parts = _parts(url_ops) + if not append_unknown: + body = [] + else: + body = [_IF_KWARGS_URL_ENCODE_AST] + url_parts.extend(_URL_ENCODE_AST_NAMES) + + def _join(parts: t.List[ast.AST]) -> ast.AST: + if len(parts) == 1: # shortcut + return parts[0] + return ast.JoinedStr(parts) + + body.append( + ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())) + ) + + pargs = [ + elem + for is_dynamic, elem in dom_ops + url_ops + if is_dynamic and elem not in defaults + ] + kargs = [str(k) for k in defaults] + + func_ast: ast.FunctionDef = _prefix_names("def _(): pass") # type: ignore + func_ast.name = f"" + func_ast.args.args.append(ast.arg(".self", None)) + for arg in pargs + kargs: + 
func_ast.args.args.append(ast.arg(arg, None)) + func_ast.args.kwarg = ast.arg(".kwargs", None) + for _ in kargs: + func_ast.args.defaults.append(ast.Str("")) + func_ast.body = body + + # use `ast.parse` instead of `ast.Module` for better portability + # Python 3.8 changes the signature of `ast.Module` + module = ast.parse("") + module.body = [func_ast] + + # mark everything as on line 1, offset 0 + # less error-prone than `ast.fix_missing_locations` + # bad line numbers cause an assert to fail in debug builds + for node in ast.walk(module): + if "lineno" in node._attributes: + node.lineno = 1 + if "end_lineno" in node._attributes: + node.end_lineno = node.lineno # type: ignore[attr-defined] + if "col_offset" in node._attributes: + node.col_offset = 0 + if "end_col_offset" in node._attributes: + node.end_col_offset = node.col_offset # type: ignore[attr-defined] + + code = compile(module, "", "exec") + return self._get_func_code(code, func_ast.name) + + def build( + self, values: t.Mapping[str, t.Any], append_unknown: bool = True + ) -> t.Optional[t.Tuple[str, str]]: + """Assembles the relative url for that rule and the subdomain. + If building doesn't work for some reasons `None` is returned. + + :internal: + """ + try: + if append_unknown: + return self._build_unknown(**values) + else: + return self._build(**values) + except ValidationError: + return None + + def provides_defaults_for(self, rule: "Rule") -> bool: + """Check if this rule has defaults for a given rule. + + :internal: + """ + return bool( + not self.build_only + and self.defaults + and self.endpoint == rule.endpoint + and self != rule + and self.arguments == rule.arguments + ) + + def suitable_for( + self, values: t.Mapping[str, t.Any], method: t.Optional[str] = None + ) -> bool: + """Check if the dict of values has enough data for url generation. + + :internal: + """ + # if a method was given explicitly and that method is not supported + # by this rule, this rule is not suitable. + if ( + method is not None + and self.methods is not None + and method not in self.methods + ): + return False + + defaults = self.defaults or () + + # all arguments required must be either in the defaults dict or + # the value dictionary otherwise it's not suitable + for key in self.arguments: + if key not in defaults and key not in values: + return False + + # in case defaults are given we ensure that either the value was + # skipped or the value is the same as the default value. + if defaults: + for key, value in defaults.items(): + if key in values and value != values[key]: + return False + + return True + + def build_compare_key(self) -> t.Tuple[int, int, int]: + """The build compare key for sorting. 
+ + :internal: + """ + return (1 if self.alias else 0, -len(self.arguments), -len(self.defaults or ())) + + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) and self._trace == other._trace + + __hash__ = None # type: ignore + + def __str__(self) -> str: + return self.rule + + def __repr__(self) -> str: + if self.map is None: + return f"<{type(self).__name__} (unbound)>" + parts = [] + for is_dynamic, data in self._trace: + if is_dynamic: + parts.append(f"<{data}>") + else: + parts.append(data) + parts = "".join(parts).lstrip("|") + methods = f" ({', '.join(self.methods)})" if self.methods is not None else "" + return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>" diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c7977e9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/http.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/http.cpython-38.pyc new file mode 100644 index 0000000..78af4ee Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/http.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-38.pyc new file mode 100644 index 0000000..53fc6be Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/request.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/request.cpython-38.pyc new file mode 100644 index 0000000..e301804 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/request.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/response.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/response.cpython-38.pyc new file mode 100644 index 0000000..3f22961 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/response.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..894e074 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/http.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/http.py new file mode 100644 index 0000000..8288882 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/http.py @@ -0,0 +1,140 @@ +import re +import typing as t +from datetime import datetime + +from .._internal 
import _cookie_parse_impl +from .._internal import _dt_as_utc +from .._internal import _to_str +from ..http import generate_etag +from ..http import parse_date +from ..http import parse_etags +from ..http import parse_if_range_header +from ..http import unquote_etag + +_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') + + +def is_resource_modified( + http_range: t.Optional[str] = None, + http_if_range: t.Optional[str] = None, + http_if_modified_since: t.Optional[str] = None, + http_if_none_match: t.Optional[str] = None, + http_if_match: t.Optional[str] = None, + etag: t.Optional[str] = None, + data: t.Optional[bytes] = None, + last_modified: t.Optional[t.Union[datetime, str]] = None, + ignore_if_range: bool = True, +) -> bool: + """Convenience method for conditional requests. + :param http_range: Range HTTP header + :param http_if_range: If-Range HTTP header + :param http_if_modified_since: If-Modified-Since HTTP header + :param http_if_none_match: If-None-Match HTTP header + :param http_if_match: If-Match HTTP header + :param etag: the etag for the response for comparison. + :param data: or alternatively the data of the response to automatically + generate an etag using :func:`generate_etag`. + :param last_modified: an optional date of the last modification. + :param ignore_if_range: If `False`, `If-Range` header will be taken into + account. + :return: `True` if the resource was modified, otherwise `False`. + + .. versionadded:: 2.2 + """ + if etag is None and data is not None: + etag = generate_etag(data) + elif data is not None: + raise TypeError("both data and etag given") + + unmodified = False + if isinstance(last_modified, str): + last_modified = parse_date(last_modified) + + # HTTP doesn't use microsecond, remove it to avoid false positive + # comparisons. Mark naive datetimes as UTC. + if last_modified is not None: + last_modified = _dt_as_utc(last_modified.replace(microsecond=0)) + + if_range = None + if not ignore_if_range and http_range is not None: + # https://tools.ietf.org/html/rfc7233#section-3.2 + # A server MUST ignore an If-Range header field received in a request + # that does not contain a Range header field. + if_range = parse_if_range_header(http_if_range) + + if if_range is not None and if_range.date is not None: + modified_since: t.Optional[datetime] = if_range.date + else: + modified_since = parse_date(http_if_modified_since) + + if modified_since and last_modified and last_modified <= modified_since: + unmodified = True + + if etag: + etag, _ = unquote_etag(etag) + etag = t.cast(str, etag) + + if if_range is not None and if_range.etag is not None: + unmodified = parse_etags(if_range.etag).contains(etag) + else: + if_none_match = parse_etags(http_if_none_match) + if if_none_match: + # https://tools.ietf.org/html/rfc7232#section-3.2 + # "A recipient MUST use the weak comparison function when comparing + # entity-tags for If-None-Match" + unmodified = if_none_match.contains_weak(etag) + + # https://tools.ietf.org/html/rfc7232#section-3.1 + # "Origin server MUST use the strong comparison function when + # comparing entity-tags for If-Match" + if_match = parse_etags(http_if_match) + if if_match: + unmodified = not if_match.is_strong(etag) + + return not unmodified + + +def parse_cookie( + cookie: t.Union[bytes, str, None] = "", + charset: str = "utf-8", + errors: str = "replace", + cls: t.Optional[t.Type["ds.MultiDict"]] = None, +) -> "ds.MultiDict[str, str]": + """Parse a cookie from a string. 
+ + The same key can be provided multiple times, the values are stored + in-order. The default :class:`MultiDict` will have the first value + first, and all values can be retrieved with + :meth:`MultiDict.getlist`. + + :param cookie: The cookie header as a string. + :param charset: The charset for the cookie values. + :param errors: The error behavior for the charset decoding. + :param cls: A dict-like class to store the parsed cookies in. + Defaults to :class:`MultiDict`. + + .. versionadded:: 2.2 + """ + # PEP 3333 sends headers through the environ as latin1 decoded + # strings. Encode strings back to bytes for parsing. + if isinstance(cookie, str): + cookie = cookie.encode("latin1", "replace") + + if cls is None: + cls = ds.MultiDict + + def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]: + for key, val in _cookie_parse_impl(cookie): # type: ignore + key_str = _to_str(key, charset, errors, allow_none_charset=True) + + if not key_str: + continue + + val_str = _to_str(val, charset, errors, allow_none_charset=True) + yield key_str, val_str + + return cls(_parse_pairs()) + + +# circular dependencies +from .. import datastructures as ds diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py new file mode 100644 index 0000000..d8abeb3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py @@ -0,0 +1,279 @@ +import re +from dataclasses import dataclass +from enum import auto +from enum import Enum +from typing import cast +from typing import List +from typing import Optional +from typing import Tuple + +from .._internal import _to_bytes +from .._internal import _to_str +from ..datastructures import Headers +from ..exceptions import RequestEntityTooLarge +from ..http import parse_options_header + + +class Event: + pass + + +@dataclass(frozen=True) +class Preamble(Event): + data: bytes + + +@dataclass(frozen=True) +class Field(Event): + name: str + headers: Headers + + +@dataclass(frozen=True) +class File(Event): + name: str + filename: str + headers: Headers + + +@dataclass(frozen=True) +class Data(Event): + data: bytes + more_data: bool + + +@dataclass(frozen=True) +class Epilogue(Event): + data: bytes + + +class NeedData(Event): + pass + + +NEED_DATA = NeedData() + + +class State(Enum): + PREAMBLE = auto() + PART = auto() + DATA = auto() + EPILOGUE = auto() + COMPLETE = auto() + + +# Multipart line breaks MUST be CRLF (\r\n) by RFC-7578, except that +# many implementations break this and either use CR or LF alone. +LINE_BREAK = b"(?:\r\n|\n|\r)" +BLANK_LINE_RE = re.compile(b"(?:\r\n\r\n|\r\r|\n\n)", re.MULTILINE) +LINE_BREAK_RE = re.compile(LINE_BREAK, re.MULTILINE) +# Header values can be continued via a space or tab after the linebreak, as +# per RFC2231 +HEADER_CONTINUATION_RE = re.compile(b"%s[ \t]" % LINE_BREAK, re.MULTILINE) +# This must be long enough to contain any line breaks plus any +# additional boundary markers (--) such that they will be found in a +# subsequent search +SEARCH_EXTRA_LENGTH = 8 + + +class MultipartDecoder: + """Decodes a multipart message as bytes into Python events. + + The part data is returned as available to allow the caller to save + the data from memory to disk, if desired. 
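+
+    A rough usage sketch (``body_bytes`` and the boundary value are made-up
+    illustrations, not part of Werkzeug):
+
+    .. code-block:: python
+
+        decoder = MultipartDecoder(b"boundary")
+        decoder.receive_data(body_bytes)   # feed bytes as they arrive
+        decoder.receive_data(None)         # signal end of input
+
+        event = decoder.next_event()
+        while not isinstance(event, Epilogue):
+            ...  # handle Preamble, Field, File and Data events here
+            event = decoder.next_event()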
+ """ + + def __init__( + self, + boundary: bytes, + max_form_memory_size: Optional[int] = None, + ) -> None: + self.buffer = bytearray() + self.complete = False + self.max_form_memory_size = max_form_memory_size + self.state = State.PREAMBLE + self.boundary = boundary + + # Note in the below \h i.e. horizontal whitespace is used + # as [^\S\n\r] as \h isn't supported in python. + + # The preamble must end with a boundary where the boundary is + # prefixed by a line break, RFC2046. Except that many + # implementations including Werkzeug's tests omit the line + # break prefix. In addition the first boundary could be the + # epilogue boundary (for empty form-data) hence the matching + # group to understand if it is an epilogue boundary. + self.preamble_re = re.compile( + rb"%s?--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) + # A boundary must include a line break prefix and suffix, and + # may include trailing whitespace. In addition the boundary + # could be the epilogue boundary hence the matching group to + # understand if it is an epilogue boundary. + self.boundary_re = re.compile( + rb"%s--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) + self._search_position = 0 + + def last_newline(self) -> int: + try: + last_nl = self.buffer.rindex(b"\n") + except ValueError: + last_nl = len(self.buffer) + try: + last_cr = self.buffer.rindex(b"\r") + except ValueError: + last_cr = len(self.buffer) + + return min(last_nl, last_cr) + + def receive_data(self, data: Optional[bytes]) -> None: + if data is None: + self.complete = True + elif ( + self.max_form_memory_size is not None + and len(self.buffer) + len(data) > self.max_form_memory_size + ): + raise RequestEntityTooLarge() + else: + self.buffer.extend(data) + + def next_event(self) -> Event: + event: Event = NEED_DATA + + if self.state == State.PREAMBLE: + match = self.preamble_re.search(self.buffer, self._search_position) + if match is not None: + if match.group(1).startswith(b"--"): + self.state = State.EPILOGUE + else: + self.state = State.PART + data = bytes(self.buffer[: match.start()]) + del self.buffer[: match.end()] + event = Preamble(data=data) + self._search_position = 0 + else: + # Update the search start position to be equal to the + # current buffer length (already searched) minus a + # safe buffer for part of the search target. + self._search_position = max( + 0, len(self.buffer) - len(self.boundary) - SEARCH_EXTRA_LENGTH + ) + + elif self.state == State.PART: + match = BLANK_LINE_RE.search(self.buffer, self._search_position) + if match is not None: + headers = self._parse_headers(self.buffer[: match.start()]) + del self.buffer[: match.end()] + + if "content-disposition" not in headers: + raise ValueError("Missing Content-Disposition header") + + disposition, extra = parse_options_header( + headers["content-disposition"] + ) + name = cast(str, extra.get("name")) + filename = extra.get("filename") + if filename is not None: + event = File( + filename=filename, + headers=headers, + name=name, + ) + else: + event = Field( + headers=headers, + name=name, + ) + self.state = State.DATA + self._search_position = 0 + else: + # Update the search start position to be equal to the + # current buffer length (already searched) minus a + # safe buffer for part of the search target. 
+ self._search_position = max(0, len(self.buffer) - SEARCH_EXTRA_LENGTH) + + elif self.state == State.DATA: + if self.buffer.find(b"--" + self.boundary) == -1: + # No complete boundary in the buffer, but there may be + # a partial boundary at the end. As the boundary + # starts with either a nl or cr find the earliest and + # return up to that as data. + data_length = del_index = self.last_newline() + more_data = True + else: + match = self.boundary_re.search(self.buffer) + if match is not None: + if match.group(1).startswith(b"--"): + self.state = State.EPILOGUE + else: + self.state = State.PART + data_length = match.start() + del_index = match.end() + else: + data_length = del_index = self.last_newline() + more_data = match is None + + data = bytes(self.buffer[:data_length]) + del self.buffer[:del_index] + if data or not more_data: + event = Data(data=data, more_data=more_data) + + elif self.state == State.EPILOGUE and self.complete: + event = Epilogue(data=bytes(self.buffer)) + del self.buffer[:] + self.state = State.COMPLETE + + if self.complete and isinstance(event, NeedData): + raise ValueError(f"Invalid form-data cannot parse beyond {self.state}") + + return event + + def _parse_headers(self, data: bytes) -> Headers: + headers: List[Tuple[str, str]] = [] + # Merge the continued headers into one line + data = HEADER_CONTINUATION_RE.sub(b" ", data) + # Now there is one header per line + for line in data.splitlines(): + if line.strip() != b"": + name, value = _to_str(line).strip().split(":", 1) + headers.append((name.strip(), value.strip())) + return Headers(headers) + + +class MultipartEncoder: + def __init__(self, boundary: bytes) -> None: + self.boundary = boundary + self.state = State.PREAMBLE + + def send_event(self, event: Event) -> bytes: + if isinstance(event, Preamble) and self.state == State.PREAMBLE: + self.state = State.PART + return event.data + elif isinstance(event, (Field, File)) and self.state in { + State.PREAMBLE, + State.PART, + State.DATA, + }: + self.state = State.DATA + data = b"\r\n--" + self.boundary + b"\r\n" + data += b'Content-Disposition: form-data; name="%s"' % _to_bytes(event.name) + if isinstance(event, File): + data += b'; filename="%s"' % _to_bytes(event.filename) + data += b"\r\n" + for name, value in cast(Field, event).headers: + if name.lower() != "content-disposition": + data += _to_bytes(f"{name}: {value}\r\n") + data += b"\r\n" + return data + elif isinstance(event, Data) and self.state == State.DATA: + return event.data + elif isinstance(event, Epilogue): + self.state = State.COMPLETE + return b"\r\n--" + self.boundary + b"--\r\n" + event.data + else: + raise ValueError(f"Cannot generate {event} in state: {self.state}") diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py new file mode 100644 index 0000000..8832baa --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py @@ -0,0 +1,547 @@ +import typing as t +from datetime import datetime + +from .._internal import _to_str +from ..datastructures import Accept +from ..datastructures import Authorization +from ..datastructures import CharsetAccept +from ..datastructures import ETags +from ..datastructures import Headers +from ..datastructures import HeaderSet +from ..datastructures import IfRange +from ..datastructures import ImmutableList +from ..datastructures import ImmutableMultiDict +from ..datastructures import LanguageAccept +from ..datastructures import MIMEAccept +from ..datastructures 
import MultiDict +from ..datastructures import Range +from ..datastructures import RequestCacheControl +from ..http import parse_accept_header +from ..http import parse_authorization_header +from ..http import parse_cache_control_header +from ..http import parse_date +from ..http import parse_etags +from ..http import parse_if_range_header +from ..http import parse_list_header +from ..http import parse_options_header +from ..http import parse_range_header +from ..http import parse_set_header +from ..urls import url_decode +from ..user_agent import UserAgent +from ..utils import cached_property +from ..utils import header_property +from .http import parse_cookie +from .utils import get_current_url +from .utils import get_host + + +class Request: + """Represents the non-IO parts of a HTTP request, including the + method, URL info, and headers. + + This class is not meant for general use. It should only be used when + implementing WSGI, ASGI, or another HTTP application spec. Werkzeug + provides a WSGI implementation at :cls:`werkzeug.wrappers.Request`. + + :param method: The method the request was made with, such as + ``GET``. + :param scheme: The URL scheme of the protocol the request used, such + as ``https`` or ``wss``. + :param server: The address of the server. ``(host, port)``, + ``(path, None)`` for unix sockets, or ``None`` if not known. + :param root_path: The prefix that the application is mounted under. + This is prepended to generated URLs, but is not part of route + matching. + :param path: The path part of the URL after ``root_path``. + :param query_string: The part of the URL after the "?". + :param headers: The headers received with the request. + :param remote_addr: The address of the client sending the request. + + .. versionadded:: 2.0 + """ + + #: The charset used to decode most data in the request. + charset = "utf-8" + + #: the error handling procedure for errors, defaults to 'replace' + encoding_errors = "replace" + + #: the class to use for `args` and `form`. The default is an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports + #: multiple values per key. alternatively it makes sense to use an + #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which + #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` + #: which is the fastest but only remembers the last key. It is also + #: possible to use mutable structures, but this is not recommended. + #: + #: .. versionadded:: 0.6 + parameter_storage_class: t.Type[MultiDict] = ImmutableMultiDict + + #: The type to be used for dict values from the incoming WSGI + #: environment. (For example for :attr:`cookies`.) By default an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used. + #: + #: .. versionchanged:: 1.0.0 + #: Changed to ``ImmutableMultiDict`` to support multiple values. + #: + #: .. versionadded:: 0.6 + dict_storage_class: t.Type[MultiDict] = ImmutableMultiDict + + #: the type to be used for list values from the incoming WSGI environment. + #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used + #: (for example for :attr:`access_list`). + #: + #: .. versionadded:: 0.6 + list_storage_class: t.Type[t.List] = ImmutableList + + user_agent_class: t.Type[UserAgent] = UserAgent + """The class used and returned by the :attr:`user_agent` property to + parse the header. Defaults to + :class:`~werkzeug.user_agent.UserAgent`, which does no parsing. An + extension can provide a subclass that uses a parser to provide other + data. + + .. 
versionadded:: 2.0 + """ + + #: Valid host names when handling requests. By default all hosts are + #: trusted, which means that whatever the client says the host is + #: will be accepted. + #: + #: Because ``Host`` and ``X-Forwarded-Host`` headers can be set to + #: any value by a malicious client, it is recommended to either set + #: this property or implement similar validation in the proxy (if + #: the application is being run behind one). + #: + #: .. versionadded:: 0.9 + trusted_hosts: t.Optional[t.List[str]] = None + + def __init__( + self, + method: str, + scheme: str, + server: t.Optional[t.Tuple[str, t.Optional[int]]], + root_path: str, + path: str, + query_string: bytes, + headers: Headers, + remote_addr: t.Optional[str], + ) -> None: + #: The method the request was made with, such as ``GET``. + self.method = method.upper() + #: The URL scheme of the protocol the request used, such as + #: ``https`` or ``wss``. + self.scheme = scheme + #: The address of the server. ``(host, port)``, ``(path, None)`` + #: for unix sockets, or ``None`` if not known. + self.server = server + #: The prefix that the application is mounted under, without a + #: trailing slash. :attr:`path` comes after this. + self.root_path = root_path.rstrip("/") + #: The path part of the URL after :attr:`root_path`. This is the + #: path used for routing within the application. + self.path = "/" + path.lstrip("/") + #: The part of the URL after the "?". This is the raw value, use + #: :attr:`args` for the parsed values. + self.query_string = query_string + #: The headers received with the request. + self.headers = headers + #: The address of the client sending the request. + self.remote_addr = remote_addr + + def __repr__(self) -> str: + try: + url = self.url + except Exception as e: + url = f"(invalid URL: {e})" + + return f"<{type(self).__name__} {url!r} [{self.method}]>" + + @property + def url_charset(self) -> str: + """The charset that is assumed for URLs. Defaults to the value + of :attr:`charset`. + + .. versionadded:: 0.6 + """ + return self.charset + + @cached_property + def args(self) -> "MultiDict[str, str]": + """The parsed URL parameters (the part in the URL after the question + mark). + + By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + """ + return url_decode( + self.query_string, + self.url_charset, + errors=self.encoding_errors, + cls=self.parameter_storage_class, + ) + + @cached_property + def access_route(self) -> t.List[str]: + """If a forwarded header exists this is a list of all ip addresses + from the client ip to the last proxy server. + """ + if "X-Forwarded-For" in self.headers: + return self.list_storage_class( + parse_list_header(self.headers["X-Forwarded-For"]) + ) + elif self.remote_addr is not None: + return self.list_storage_class([self.remote_addr]) + return self.list_storage_class() + + @cached_property + def full_path(self) -> str: + """Requested path, including the query string.""" + return f"{self.path}?{_to_str(self.query_string, self.url_charset)}" + + @property + def is_secure(self) -> bool: + """``True`` if the request was made with a secure protocol + (HTTPS or WSS). 
+ """ + return self.scheme in {"https", "wss"} + + @cached_property + def url(self) -> str: + """The full request URL with the scheme, host, root path, path, + and query string.""" + return get_current_url( + self.scheme, self.host, self.root_path, self.path, self.query_string + ) + + @cached_property + def base_url(self) -> str: + """Like :attr:`url` but without the query string.""" + return get_current_url(self.scheme, self.host, self.root_path, self.path) + + @cached_property + def root_url(self) -> str: + """The request URL scheme, host, and root path. This is the root + that the application is accessed from. + """ + return get_current_url(self.scheme, self.host, self.root_path) + + @cached_property + def host_url(self) -> str: + """The request URL scheme and host only.""" + return get_current_url(self.scheme, self.host) + + @cached_property + def host(self) -> str: + """The host name the request was made to, including the port if + it's non-standard. Validated with :attr:`trusted_hosts`. + """ + return get_host( + self.scheme, self.headers.get("host"), self.server, self.trusted_hosts + ) + + @cached_property + def cookies(self) -> "ImmutableMultiDict[str, str]": + """A :class:`dict` with the contents of all cookies transmitted with + the request.""" + wsgi_combined_cookie = ";".join(self.headers.getlist("Cookie")) + return parse_cookie( # type: ignore + wsgi_combined_cookie, + self.charset, + self.encoding_errors, + cls=self.dict_storage_class, + ) + + # Common Descriptors + + content_type = header_property[str]( + "Content-Type", + doc="""The Content-Type entity-header field indicates the media + type of the entity-body sent to the recipient or, in the case of + the HEAD method, the media type that would have been sent had + the request been a GET.""", + read_only=True, + ) + + @cached_property + def content_length(self) -> t.Optional[int]: + """The Content-Length entity-header field indicates the size of the + entity-body in bytes or, in the case of the HEAD method, the size of + the entity-body that would have been sent had the request been a + GET. + """ + if self.headers.get("Transfer-Encoding", "") == "chunked": + return None + + content_length = self.headers.get("Content-Length") + if content_length is not None: + try: + return max(0, int(content_length)) + except (ValueError, TypeError): + pass + + return None + + content_encoding = header_property[str]( + "Content-Encoding", + doc="""The Content-Encoding entity-header field is used as a + modifier to the media-type. When present, its value indicates + what additional content codings have been applied to the + entity-body, and thus what decoding mechanisms must be applied + in order to obtain the media-type referenced by the Content-Type + header field. + + .. versionadded:: 0.9""", + read_only=True, + ) + content_md5 = header_property[str]( + "Content-MD5", + doc="""The Content-MD5 entity-header field, as defined in + RFC 1864, is an MD5 digest of the entity-body for the purpose of + providing an end-to-end message integrity check (MIC) of the + entity-body. (Note: a MIC is good for detecting accidental + modification of the entity-body in transit, but is not proof + against malicious attacks.) + + .. 
versionadded:: 0.9""", + read_only=True, + ) + referrer = header_property[str]( + "Referer", + doc="""The Referer[sic] request-header field allows the client + to specify, for the server's benefit, the address (URI) of the + resource from which the Request-URI was obtained (the + "referrer", although the header field is misspelled).""", + read_only=True, + ) + date = header_property( + "Date", + None, + parse_date, + doc="""The Date general-header field represents the date and + time at which the message was originated, having the same + semantics as orig-date in RFC 822. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + read_only=True, + ) + max_forwards = header_property( + "Max-Forwards", + None, + int, + doc="""The Max-Forwards request-header field provides a + mechanism with the TRACE and OPTIONS methods to limit the number + of proxies or gateways that can forward the request to the next + inbound server.""", + read_only=True, + ) + + def _parse_content_type(self) -> None: + if not hasattr(self, "_parsed_content_type"): + self._parsed_content_type = parse_options_header( + self.headers.get("Content-Type", "") + ) + + @property + def mimetype(self) -> str: + """Like :attr:`content_type`, but without parameters (eg, without + charset, type etc.) and always lowercase. For example if the content + type is ``text/HTML; charset=utf-8`` the mimetype would be + ``'text/html'``. + """ + self._parse_content_type() + return self._parsed_content_type[0].lower() + + @property + def mimetype_params(self) -> t.Dict[str, str]: + """The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + """ + self._parse_content_type() + return self._parsed_content_type[1] + + @cached_property + def pragma(self) -> HeaderSet: + """The Pragma general-header field is used to include + implementation-specific directives that might apply to any recipient + along the request/response chain. All pragma directives specify + optional behavior from the viewpoint of the protocol; however, some + systems MAY require that behavior be consistent with the directives. + """ + return parse_set_header(self.headers.get("Pragma", "")) + + # Accept + + @cached_property + def accept_mimetypes(self) -> MIMEAccept: + """List of mimetypes this client supports as + :class:`~werkzeug.datastructures.MIMEAccept` object. + """ + return parse_accept_header(self.headers.get("Accept"), MIMEAccept) + + @cached_property + def accept_charsets(self) -> CharsetAccept: + """List of charsets this client supports as + :class:`~werkzeug.datastructures.CharsetAccept` object. + """ + return parse_accept_header(self.headers.get("Accept-Charset"), CharsetAccept) + + @cached_property + def accept_encodings(self) -> Accept: + """List of encodings this client accepts. Encodings in a HTTP term + are compression encodings such as gzip. For charsets have a look at + :attr:`accept_charset`. + """ + return parse_accept_header(self.headers.get("Accept-Encoding")) + + @cached_property + def accept_languages(self) -> LanguageAccept: + """List of languages this client accepts as + :class:`~werkzeug.datastructures.LanguageAccept` object. + + .. versionchanged 0.5 + In previous versions this was a regular + :class:`~werkzeug.datastructures.Accept` object. 
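+
+        A rough usage sketch (the language codes are made-up application
+        choices, not part of Werkzeug):
+
+        .. code-block:: python
+
+            best = request.accept_languages.best_match(["en", "de"], default="en")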
+ """ + return parse_accept_header(self.headers.get("Accept-Language"), LanguageAccept) + + # ETag + + @cached_property + def cache_control(self) -> RequestCacheControl: + """A :class:`~werkzeug.datastructures.RequestCacheControl` object + for the incoming cache control headers. + """ + cache_control = self.headers.get("Cache-Control") + return parse_cache_control_header(cache_control, None, RequestCacheControl) + + @cached_property + def if_match(self) -> ETags: + """An object containing all the etags in the `If-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.headers.get("If-Match")) + + @cached_property + def if_none_match(self) -> ETags: + """An object containing all the etags in the `If-None-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.headers.get("If-None-Match")) + + @cached_property + def if_modified_since(self) -> t.Optional[datetime]: + """The parsed `If-Modified-Since` header as a datetime object. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + return parse_date(self.headers.get("If-Modified-Since")) + + @cached_property + def if_unmodified_since(self) -> t.Optional[datetime]: + """The parsed `If-Unmodified-Since` header as a datetime object. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + return parse_date(self.headers.get("If-Unmodified-Since")) + + @cached_property + def if_range(self) -> IfRange: + """The parsed ``If-Range`` header. + + .. versionchanged:: 2.0 + ``IfRange.date`` is timezone-aware. + + .. versionadded:: 0.7 + """ + return parse_if_range_header(self.headers.get("If-Range")) + + @cached_property + def range(self) -> t.Optional[Range]: + """The parsed `Range` header. + + .. versionadded:: 0.7 + + :rtype: :class:`~werkzeug.datastructures.Range` + """ + return parse_range_header(self.headers.get("Range")) + + # User Agent + + @cached_property + def user_agent(self) -> UserAgent: + """The user agent. Use ``user_agent.string`` to get the header + value. Set :attr:`user_agent_class` to a subclass of + :class:`~werkzeug.user_agent.UserAgent` to provide parsing for + the other properties or other extended data. + + .. versionchanged:: 2.0 + The built in parser is deprecated and will be removed in + Werkzeug 2.1. A ``UserAgent`` subclass must be set to parse + data from the string. + """ + return self.user_agent_class(self.headers.get("User-Agent", "")) + + # Authorization + + @cached_property + def authorization(self) -> t.Optional[Authorization]: + """The `Authorization` object in parsed form.""" + return parse_authorization_header(self.headers.get("Authorization")) + + # CORS + + origin = header_property[str]( + "Origin", + doc=( + "The host that the request originated from. Set" + " :attr:`~CORSResponseMixin.access_control_allow_origin` on" + " the response to indicate which origins are allowed." + ), + read_only=True, + ) + + access_control_request_headers = header_property( + "Access-Control-Request-Headers", + load_func=parse_set_header, + doc=( + "Sent with a preflight request to indicate which headers" + " will be sent with the cross origin request. Set" + " :attr:`~CORSResponseMixin.access_control_allow_headers`" + " on the response to indicate which headers are allowed." + ), + read_only=True, + ) + + access_control_request_method = header_property[str]( + "Access-Control-Request-Method", + doc=( + "Sent with a preflight request to indicate which method" + " will be used for the cross origin request. 
Set" + " :attr:`~CORSResponseMixin.access_control_allow_methods`" + " on the response to indicate which methods are allowed." + ), + read_only=True, + ) + + @property + def is_json(self) -> bool: + """Check if the mimetype indicates JSON data, either + :mimetype:`application/json` or :mimetype:`application/*+json`. + """ + mt = self.mimetype + return ( + mt == "application/json" + or mt.startswith("application/") + and mt.endswith("+json") + ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py new file mode 100644 index 0000000..de0bec2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py @@ -0,0 +1,704 @@ +import typing as t +from datetime import datetime +from datetime import timedelta +from datetime import timezone +from http import HTTPStatus + +from .._internal import _to_str +from ..datastructures import Headers +from ..datastructures import HeaderSet +from ..http import dump_cookie +from ..http import HTTP_STATUS_CODES +from ..utils import get_content_type +from werkzeug.datastructures import CallbackDict +from werkzeug.datastructures import ContentRange +from werkzeug.datastructures import ContentSecurityPolicy +from werkzeug.datastructures import ResponseCacheControl +from werkzeug.datastructures import WWWAuthenticate +from werkzeug.http import COEP +from werkzeug.http import COOP +from werkzeug.http import dump_age +from werkzeug.http import dump_header +from werkzeug.http import dump_options_header +from werkzeug.http import http_date +from werkzeug.http import parse_age +from werkzeug.http import parse_cache_control_header +from werkzeug.http import parse_content_range_header +from werkzeug.http import parse_csp_header +from werkzeug.http import parse_date +from werkzeug.http import parse_options_header +from werkzeug.http import parse_set_header +from werkzeug.http import parse_www_authenticate_header +from werkzeug.http import quote_etag +from werkzeug.http import unquote_etag +from werkzeug.utils import header_property + + +def _set_property(name: str, doc: t.Optional[str] = None) -> property: + def fget(self: "Response") -> HeaderSet: + def on_update(header_set: HeaderSet) -> None: + if not header_set and name in self.headers: + del self.headers[name] + elif header_set: + self.headers[name] = header_set.to_header() + + return parse_set_header(self.headers.get(name), on_update) + + def fset( + self: "Response", + value: t.Optional[ + t.Union[str, t.Dict[str, t.Union[str, int]], t.Iterable[str]] + ], + ) -> None: + if not value: + del self.headers[name] + elif isinstance(value, str): + self.headers[name] = value + else: + self.headers[name] = dump_header(value) + + return property(fget, fset, doc=doc) + + +class Response: + """Represents the non-IO parts of an HTTP response, specifically the + status and headers but not the body. + + This class is not meant for general use. It should only be used when + implementing WSGI, ASGI, or another HTTP application spec. Werkzeug + provides a WSGI implementation at :cls:`werkzeug.wrappers.Response`. + + :param status: The status code for the response. Either an int, in + which case the default status message is added, or a string in + the form ``{code} {message}``, like ``404 Not Found``. Defaults + to 200. + :param headers: A :class:`~werkzeug.datastructures.Headers` object, + or a list of ``(key, value)`` tuples that will be converted to a + ``Headers`` object. 
+ :param mimetype: The mime type (content type without charset or + other parameters) of the response. If the value starts with + ``text/`` (or matches some other special cases), the charset + will be added to create the ``content_type``. + :param content_type: The full content type of the response. + Overrides building the value from ``mimetype``. + + .. versionadded:: 2.0 + """ + + #: the charset of the response. + charset = "utf-8" + + #: the default status if none is provided. + default_status = 200 + + #: the default mimetype if none is provided. + default_mimetype: t.Optional[str] = "text/plain" + + #: Warn if a cookie header exceeds this size. The default, 4093, should be + #: safely `supported by most browsers `_. A cookie larger than + #: this size will still be sent, but it may be ignored or handled + #: incorrectly by some browsers. Set to 0 to disable this check. + #: + #: .. versionadded:: 0.13 + #: + #: .. _`cookie`: http://browsercookielimits.squawky.net/ + max_cookie_size = 4093 + + # A :class:`Headers` object representing the response headers. + headers: Headers + + def __init__( + self, + status: t.Optional[t.Union[int, str, HTTPStatus]] = None, + headers: t.Optional[ + t.Union[ + t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]], + t.Iterable[t.Tuple[str, t.Union[str, int]]], + ] + ] = None, + mimetype: t.Optional[str] = None, + content_type: t.Optional[str] = None, + ) -> None: + if isinstance(headers, Headers): + self.headers = headers + elif not headers: + self.headers = Headers() + else: + self.headers = Headers(headers) + + if content_type is None: + if mimetype is None and "content-type" not in self.headers: + mimetype = self.default_mimetype + if mimetype is not None: + mimetype = get_content_type(mimetype, self.charset) + content_type = mimetype + if content_type is not None: + self.headers["Content-Type"] = content_type + if status is None: + status = self.default_status + self.status = status # type: ignore + + def __repr__(self) -> str: + return f"<{type(self).__name__} [{self.status}]>" + + @property + def status_code(self) -> int: + """The HTTP status code as a number.""" + return self._status_code + + @status_code.setter + def status_code(self, code: int) -> None: + self.status = code # type: ignore + + @property + def status(self) -> str: + """The HTTP status code as a string.""" + return self._status + + @status.setter + def status(self, value: t.Union[str, int, HTTPStatus]) -> None: + if not isinstance(value, (str, bytes, int, HTTPStatus)): + raise TypeError("Invalid status argument") + + self._status, self._status_code = self._clean_status(value) + + def _clean_status(self, value: t.Union[str, int, HTTPStatus]) -> t.Tuple[str, int]: + if isinstance(value, HTTPStatus): + value = int(value) + status = _to_str(value, self.charset) + split_status = status.split(None, 1) + + if len(split_status) == 0: + raise ValueError("Empty status argument") + + try: + status_code = int(split_status[0]) + except ValueError: + # only message + return f"0 {status}", 0 + + if len(split_status) > 1: + # code and message + return status, status_code + + # only code, look up message + try: + status = f"{status_code} {HTTP_STATUS_CODES[status_code].upper()}" + except KeyError: + status = f"{status_code} UNKNOWN" + + return status, status_code + + def set_cookie( + self, + key: str, + value: str = "", + max_age: t.Optional[t.Union[timedelta, int]] = None, + expires: t.Optional[t.Union[str, datetime, int, float]] = None, + path: t.Optional[str] = "/", + domain: 
t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + ) -> None: + """Sets a cookie. + + A warning is raised if the size of the cookie header exceeds + :attr:`max_cookie_size`, but the header will still be set. + + :param key: the key (name) of the cookie to be set. + :param value: the value of the cookie. + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. + :param expires: should be a `datetime` object or UNIX timestamp. + :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: if you want to set a cross-domain cookie. For example, + ``domain=".example.com"`` will set a cookie that is + readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: If ``True``, the cookie will only be available + via HTTPS. + :param httponly: Disallow JavaScript access to the cookie. + :param samesite: Limit the scope of the cookie to only be + attached to requests that are "same-site". + """ + self.headers.add( + "Set-Cookie", + dump_cookie( + key, + value=value, + max_age=max_age, + expires=expires, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + charset=self.charset, + max_size=self.max_cookie_size, + samesite=samesite, + ), + ) + + def delete_cookie( + self, + key: str, + path: str = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + ) -> None: + """Delete a cookie. Fails silently if key doesn't exist. + + :param key: the key (name) of the cookie to be deleted. + :param path: if the cookie that should be deleted was limited to a + path, the path has to be defined here. + :param domain: if the cookie that should be deleted was limited to a + domain, that domain has to be defined here. + :param secure: If ``True``, the cookie will only be available + via HTTPS. + :param httponly: Disallow JavaScript access to the cookie. + :param samesite: Limit the scope of the cookie to only be + attached to requests that are "same-site". + """ + self.set_cookie( + key, + expires=0, + max_age=0, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + @property + def is_json(self) -> bool: + """Check if the mimetype indicates JSON data, either + :mimetype:`application/json` or :mimetype:`application/*+json`. + """ + mt = self.mimetype + return mt is not None and ( + mt == "application/json" + or mt.startswith("application/") + and mt.endswith("+json") + ) + + # Common Descriptors + + @property + def mimetype(self) -> t.Optional[str]: + """The mimetype (content type without charset etc.)""" + ct = self.headers.get("content-type") + + if ct: + return ct.split(";")[0].strip() + else: + return None + + @mimetype.setter + def mimetype(self, value: str) -> None: + self.headers["Content-Type"] = get_content_type(value, self.charset) + + @property + def mimetype_params(self) -> t.Dict[str, str]: + """The mimetype parameters as dict. For example if the + content type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. 
versionadded:: 0.5 + """ + + def on_update(d: CallbackDict) -> None: + self.headers["Content-Type"] = dump_options_header(self.mimetype, d) + + d = parse_options_header(self.headers.get("content-type", ""))[1] + return CallbackDict(d, on_update) + + location = header_property[str]( + "Location", + doc="""The Location response-header field is used to redirect + the recipient to a location other than the Request-URI for + completion of the request or identification of a new + resource.""", + ) + age = header_property( + "Age", + None, + parse_age, + dump_age, # type: ignore + doc="""The Age response-header field conveys the sender's + estimate of the amount of time since the response (or its + revalidation) was generated at the origin server. + + Age values are non-negative decimal integers, representing time + in seconds.""", + ) + content_type = header_property[str]( + "Content-Type", + doc="""The Content-Type entity-header field indicates the media + type of the entity-body sent to the recipient or, in the case of + the HEAD method, the media type that would have been sent had + the request been a GET.""", + ) + content_length = header_property( + "Content-Length", + None, + int, + str, + doc="""The Content-Length entity-header field indicates the size + of the entity-body, in decimal number of OCTETs, sent to the + recipient or, in the case of the HEAD method, the size of the + entity-body that would have been sent had the request been a + GET.""", + ) + content_location = header_property[str]( + "Content-Location", + doc="""The Content-Location entity-header field MAY be used to + supply the resource location for the entity enclosed in the + message when that entity is accessible from a location separate + from the requested resource's URI.""", + ) + content_encoding = header_property[str]( + "Content-Encoding", + doc="""The Content-Encoding entity-header field is used as a + modifier to the media-type. When present, its value indicates + what additional content codings have been applied to the + entity-body, and thus what decoding mechanisms must be applied + in order to obtain the media-type referenced by the Content-Type + header field.""", + ) + content_md5 = header_property[str]( + "Content-MD5", + doc="""The Content-MD5 entity-header field, as defined in + RFC 1864, is an MD5 digest of the entity-body for the purpose of + providing an end-to-end message integrity check (MIC) of the + entity-body. (Note: a MIC is good for detecting accidental + modification of the entity-body in transit, but is not proof + against malicious attacks.)""", + ) + date = header_property( + "Date", + None, + parse_date, + http_date, + doc="""The Date general-header field represents the date and + time at which the message was originated, having the same + semantics as orig-date in RFC 822. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + expires = header_property( + "Expires", + None, + parse_date, + http_date, + doc="""The Expires entity-header field gives the date/time after + which the response is considered stale. A stale cache entry may + not normally be returned by a cache. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + last_modified = header_property( + "Last-Modified", + None, + parse_date, + http_date, + doc="""The Last-Modified entity-header field indicates the date + and time at which the origin server believes the variant was + last modified. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. 
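+
+        A rough usage sketch (``response`` is an assumed instance; the date is
+        a made-up example). Assigning a timezone-aware ``datetime`` serializes
+        it through :func:`~werkzeug.http.http_date`:
+
+        .. code-block:: python
+
+            from datetime import datetime, timezone
+
+            response.last_modified = datetime(2022, 1, 1, tzinfo=timezone.utc)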
+ """, + ) + + @property + def retry_after(self) -> t.Optional[datetime]: + """The Retry-After response-header field can be used with a + 503 (Service Unavailable) response to indicate how long the + service is expected to be unavailable to the requesting client. + + Time in seconds until expiration or date. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + value = self.headers.get("retry-after") + if value is None: + return None + + try: + seconds = int(value) + except ValueError: + return parse_date(value) + + return datetime.now(timezone.utc) + timedelta(seconds=seconds) + + @retry_after.setter + def retry_after(self, value: t.Optional[t.Union[datetime, int, str]]) -> None: + if value is None: + if "retry-after" in self.headers: + del self.headers["retry-after"] + return + elif isinstance(value, datetime): + value = http_date(value) + else: + value = str(value) + self.headers["Retry-After"] = value + + vary = _set_property( + "Vary", + doc="""The Vary field value indicates the set of request-header + fields that fully determines, while the response is fresh, + whether a cache is permitted to use the response to reply to a + subsequent request without revalidation.""", + ) + content_language = _set_property( + "Content-Language", + doc="""The Content-Language entity-header field describes the + natural language(s) of the intended audience for the enclosed + entity. Note that this might not be equivalent to all the + languages used within the entity-body.""", + ) + allow = _set_property( + "Allow", + doc="""The Allow entity-header field lists the set of methods + supported by the resource identified by the Request-URI. The + purpose of this field is strictly to inform the recipient of + valid methods associated with the resource. An Allow header + field MUST be present in a 405 (Method Not Allowed) + response.""", + ) + + # ETag + + @property + def cache_control(self) -> ResponseCacheControl: + """The Cache-Control general-header field is used to specify + directives that MUST be obeyed by all caching mechanisms along the + request/response chain. + """ + + def on_update(cache_control: ResponseCacheControl) -> None: + if not cache_control and "cache-control" in self.headers: + del self.headers["cache-control"] + elif cache_control: + self.headers["Cache-Control"] = cache_control.to_header() + + return parse_cache_control_header( + self.headers.get("cache-control"), on_update, ResponseCacheControl + ) + + def set_etag(self, etag: str, weak: bool = False) -> None: + """Set the etag, and override the old one if there was one.""" + self.headers["ETag"] = quote_etag(etag, weak) + + def get_etag(self) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]: + """Return a tuple in the form ``(etag, is_weak)``. If there is no + ETag the return value is ``(None, None)``. + """ + return unquote_etag(self.headers.get("ETag")) + + accept_ranges = header_property[str]( + "Accept-Ranges", + doc="""The `Accept-Ranges` header. Even though the name would + indicate that multiple values are supported, it must be one + string token only. + + The values ``'bytes'`` and ``'none'`` are common. + + .. versionadded:: 0.7""", + ) + + @property + def content_range(self) -> ContentRange: + """The ``Content-Range`` header as a + :class:`~werkzeug.datastructures.ContentRange` object. Available + even if the header is not set. + + .. 
versionadded:: 0.7 + """ + + def on_update(rng: ContentRange) -> None: + if not rng: + del self.headers["content-range"] + else: + self.headers["Content-Range"] = rng.to_header() + + rv = parse_content_range_header(self.headers.get("content-range"), on_update) + # always provide a content range object to make the descriptor + # more user friendly. It provides an unset() method that can be + # used to remove the header quickly. + if rv is None: + rv = ContentRange(None, None, None, on_update=on_update) + return rv + + @content_range.setter + def content_range(self, value: t.Optional[t.Union[ContentRange, str]]) -> None: + if not value: + del self.headers["content-range"] + elif isinstance(value, str): + self.headers["Content-Range"] = value + else: + self.headers["Content-Range"] = value.to_header() + + # Authorization + + @property + def www_authenticate(self) -> WWWAuthenticate: + """The ``WWW-Authenticate`` header in a parsed form.""" + + def on_update(www_auth: WWWAuthenticate) -> None: + if not www_auth and "www-authenticate" in self.headers: + del self.headers["www-authenticate"] + elif www_auth: + self.headers["WWW-Authenticate"] = www_auth.to_header() + + header = self.headers.get("www-authenticate") + return parse_www_authenticate_header(header, on_update) + + # CSP + + @property + def content_security_policy(self) -> ContentSecurityPolicy: + """The ``Content-Security-Policy`` header as a + :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available + even if the header is not set. + + The Content-Security-Policy header adds an additional layer of + security to help detect and mitigate certain types of attacks. + """ + + def on_update(csp: ContentSecurityPolicy) -> None: + if not csp: + del self.headers["content-security-policy"] + else: + self.headers["Content-Security-Policy"] = csp.to_header() + + rv = parse_csp_header(self.headers.get("content-security-policy"), on_update) + if rv is None: + rv = ContentSecurityPolicy(None, on_update=on_update) + return rv + + @content_security_policy.setter + def content_security_policy( + self, value: t.Optional[t.Union[ContentSecurityPolicy, str]] + ) -> None: + if not value: + del self.headers["content-security-policy"] + elif isinstance(value, str): + self.headers["Content-Security-Policy"] = value + else: + self.headers["Content-Security-Policy"] = value.to_header() + + @property + def content_security_policy_report_only(self) -> ContentSecurityPolicy: + """The ``Content-Security-policy-report-only`` header as a + :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available + even if the header is not set. + + The Content-Security-Policy-Report-Only header adds a csp policy + that is not enforced but is reported thereby helping detect + certain types of attacks. 
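+
+        A rough usage sketch (the policy string is a made-up example and
+        ``response`` is an assumed instance). Assigning a plain string stores
+        it directly in the header, as the setter below shows:
+
+        .. code-block:: python
+
+            response.content_security_policy_report_only = "default-src 'self'"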
+ """ + + def on_update(csp: ContentSecurityPolicy) -> None: + if not csp: + del self.headers["content-security-policy-report-only"] + else: + self.headers["Content-Security-policy-report-only"] = csp.to_header() + + rv = parse_csp_header( + self.headers.get("content-security-policy-report-only"), on_update + ) + if rv is None: + rv = ContentSecurityPolicy(None, on_update=on_update) + return rv + + @content_security_policy_report_only.setter + def content_security_policy_report_only( + self, value: t.Optional[t.Union[ContentSecurityPolicy, str]] + ) -> None: + if not value: + del self.headers["content-security-policy-report-only"] + elif isinstance(value, str): + self.headers["Content-Security-policy-report-only"] = value + else: + self.headers["Content-Security-policy-report-only"] = value.to_header() + + # CORS + + @property + def access_control_allow_credentials(self) -> bool: + """Whether credentials can be shared by the browser to + JavaScript code. As part of the preflight request it indicates + whether credentials can be used on the cross origin request. + """ + return "Access-Control-Allow-Credentials" in self.headers + + @access_control_allow_credentials.setter + def access_control_allow_credentials(self, value: t.Optional[bool]) -> None: + if value is True: + self.headers["Access-Control-Allow-Credentials"] = "true" + else: + self.headers.pop("Access-Control-Allow-Credentials", None) + + access_control_allow_headers = header_property( + "Access-Control-Allow-Headers", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which headers can be sent with the cross origin request.", + ) + + access_control_allow_methods = header_property( + "Access-Control-Allow-Methods", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which methods can be used for the cross origin request.", + ) + + access_control_allow_origin = header_property[str]( + "Access-Control-Allow-Origin", + doc="The origin or '*' for any origin that may make cross origin requests.", + ) + + access_control_expose_headers = header_property( + "Access-Control-Expose-Headers", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which headers can be shared by the browser to JavaScript code.", + ) + + access_control_max_age = header_property( + "Access-Control-Max-Age", + load_func=int, + dump_func=str, + doc="The maximum age in seconds the access control settings can be cached for.", + ) + + cross_origin_opener_policy = header_property[COOP]( + "Cross-Origin-Opener-Policy", + load_func=lambda value: COOP(value), + dump_func=lambda value: value.value, + default=COOP.UNSAFE_NONE, + doc="""Allows control over sharing of browsing context group with cross-origin + documents. Values must be a member of the :class:`werkzeug.http.COOP` enum.""", + ) + + cross_origin_embedder_policy = header_property[COEP]( + "Cross-Origin-Embedder-Policy", + load_func=lambda value: COEP(value), + dump_func=lambda value: value.value, + default=COEP.UNSAFE_NONE, + doc="""Prevents a document from loading any cross-origin resources that do not + explicitly grant the document permission. 
Values must be a member of the + :class:`werkzeug.http.COEP` enum.""", + ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py new file mode 100644 index 0000000..e639dcb --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py @@ -0,0 +1,165 @@ +import typing as t + +from .._internal import _encode_idna +from ..exceptions import SecurityError +from ..urls import uri_to_iri +from ..urls import url_quote + + +def host_is_trusted(hostname: str, trusted_list: t.Iterable[str]) -> bool: + """Check if a host matches a list of trusted names. + + :param hostname: The name to check. + :param trusted_list: A list of valid names to match. If a name + starts with a dot it will match all subdomains. + + .. versionadded:: 0.9 + """ + if not hostname: + return False + + if isinstance(trusted_list, str): + trusted_list = [trusted_list] + + def _normalize(hostname: str) -> bytes: + if ":" in hostname: + hostname = hostname.rsplit(":", 1)[0] + + return _encode_idna(hostname) + + try: + hostname_bytes = _normalize(hostname) + except UnicodeError: + return False + + for ref in trusted_list: + if ref.startswith("."): + ref = ref[1:] + suffix_match = True + else: + suffix_match = False + + try: + ref_bytes = _normalize(ref) + except UnicodeError: + return False + + if ref_bytes == hostname_bytes: + return True + + if suffix_match and hostname_bytes.endswith(b"." + ref_bytes): + return True + + return False + + +def get_host( + scheme: str, + host_header: t.Optional[str], + server: t.Optional[t.Tuple[str, t.Optional[int]]] = None, + trusted_hosts: t.Optional[t.Iterable[str]] = None, +) -> str: + """Return the host for the given parameters. + + This first checks the ``host_header``. If it's not present, then + ``server`` is used. The host will only contain the port if it is + different than the standard port for the protocol. + + Optionally, verify that the host is trusted using + :func:`host_is_trusted` and raise a + :exc:`~werkzeug.exceptions.SecurityError` if it is not. + + :param scheme: The protocol the request used, like ``"https"``. + :param host_header: The ``Host`` header value. + :param server: Address of the server. ``(host, port)``, or + ``(path, None)`` for unix sockets. + :param trusted_hosts: A list of trusted host names. + + :return: Host, with port if necessary. + :raise ~werkzeug.exceptions.SecurityError: If the host is not + trusted. + """ + host = "" + + if host_header is not None: + host = host_header + elif server is not None: + host = server[0] + + if server[1] is not None: + host = f"{host}:{server[1]}" + + if scheme in {"http", "ws"} and host.endswith(":80"): + host = host[:-3] + elif scheme in {"https", "wss"} and host.endswith(":443"): + host = host[:-4] + + if trusted_hosts is not None: + if not host_is_trusted(host, trusted_hosts): + raise SecurityError(f"Host {host!r} is not trusted.") + + return host + + +def get_current_url( + scheme: str, + host: str, + root_path: t.Optional[str] = None, + path: t.Optional[str] = None, + query_string: t.Optional[bytes] = None, +) -> str: + """Recreate the URL for a request. If an optional part isn't + provided, it and subsequent parts are not included in the URL. + + The URL is an IRI, not a URI, so it may contain Unicode characters. + Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. + + :param scheme: The protocol the request used, like ``"https"``. + :param host: The host the request was made to. See :func:`get_host`. 
+ :param root_path: Prefix that the application is mounted under. This + is prepended to ``path``. + :param path: The path part of the URL after ``root_path``. + :param query_string: The portion of the URL after the "?". + """ + url = [scheme, "://", host] + + if root_path is None: + url.append("/") + return uri_to_iri("".join(url)) + + url.append(url_quote(root_path.rstrip("/"))) + url.append("/") + + if path is None: + return uri_to_iri("".join(url)) + + url.append(url_quote(path.lstrip("/"))) + + if query_string: + url.append("?") + url.append(url_quote(query_string, safe=":&%=+$!*'(),")) + + return uri_to_iri("".join(url)) + + +def get_content_length( + http_content_length: t.Union[str, None] = None, + http_transfer_encoding: t.Union[str, None] = "", +) -> t.Optional[int]: + """Returns the content length as an integer or ``None`` if + unavailable or chunked transfer encoding is used. + + :param http_content_length: The Content-Length HTTP header. + :param http_transfer_encoding: The Transfer-Encoding HTTP header. + + .. versionadded:: 2.2 + """ + if http_transfer_encoding == "chunked": + return None + + if http_content_length is not None: + try: + return max(0, int(http_content_length)) + except (ValueError, TypeError): + pass + return None diff --git a/venv/lib/python3.8/site-packages/werkzeug/security.py b/venv/lib/python3.8/site-packages/werkzeug/security.py new file mode 120000 index 0000000..0c73e14 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/security.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/be/b0/68/7e1e16659a14a35780749e85d43af35519586ae192d825036296d02a3f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/serving.py b/venv/lib/python3.8/site-packages/werkzeug/serving.py new file mode 100644 index 0000000..c482469 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/serving.py @@ -0,0 +1,1098 @@ +"""A WSGI and HTTP server for use **during development only**. This +server is convenient to use, but is not designed to be particularly +stable, secure, or efficient. Use a dedicate WSGI server and HTTP +server when deploying to production. + +It provides features like interactive debugging and code reloading. Use +``run_simple`` to start the server. Put this in a ``run.py`` script: + +.. code-block:: python + + from myapp import create_app + from werkzeug import run_simple +""" +import errno +import io +import os +import socket +import socketserver +import sys +import typing as t +from datetime import datetime as dt +from datetime import timedelta +from datetime import timezone +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer + +from ._internal import _log +from ._internal import _wsgi_encoding_dance +from .exceptions import InternalServerError +from .urls import uri_to_iri +from .urls import url_parse +from .urls import url_unquote + +try: + import ssl +except ImportError: + + class _SslDummy: + def __getattr__(self, name: str) -> t.Any: + raise RuntimeError( # noqa: B904 + "SSL is unavailable because this Python runtime was not" + " compiled with SSL/TLS support." 
+ ) + + ssl = _SslDummy() # type: ignore + +_log_add_style = True + +if os.name == "nt": + try: + __import__("colorama") + except ImportError: + _log_add_style = False + +can_fork = hasattr(os, "fork") + +if can_fork: + ForkingMixIn = socketserver.ForkingMixIn +else: + + class ForkingMixIn: # type: ignore + pass + + +try: + af_unix = socket.AF_UNIX +except AttributeError: + af_unix = None # type: ignore + +LISTEN_QUEUE = 128 + +_TSSLContextArg = t.Optional[ + t.Union["ssl.SSLContext", t.Tuple[str, t.Optional[str]], "te.Literal['adhoc']"] +] + +if t.TYPE_CHECKING: + import typing_extensions as te # noqa: F401 + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, + ) + from cryptography.x509 import Certificate + + +class DechunkedInput(io.RawIOBase): + """An input stream that handles Transfer-Encoding 'chunked'""" + + def __init__(self, rfile: t.IO[bytes]) -> None: + self._rfile = rfile + self._done = False + self._len = 0 + + def readable(self) -> bool: + return True + + def read_chunk_len(self) -> int: + try: + line = self._rfile.readline().decode("latin1") + _len = int(line.strip(), 16) + except ValueError as e: + raise OSError("Invalid chunk header") from e + if _len < 0: + raise OSError("Negative chunk length not allowed") + return _len + + def readinto(self, buf: bytearray) -> int: # type: ignore + read = 0 + while not self._done and read < len(buf): + if self._len == 0: + # This is the first chunk or we fully consumed the previous + # one. Read the next length of the next chunk + self._len = self.read_chunk_len() + + if self._len == 0: + # Found the final chunk of size 0. The stream is now exhausted, + # but there is still a final newline that should be consumed + self._done = True + + if self._len > 0: + # There is data (left) in this chunk, so append it to the + # buffer. If this operation fully consumes the chunk, this will + # reset self._len to 0. + n = min(len(buf), self._len) + + # If (read + chunk size) becomes more than len(buf), buf will + # grow beyond the original size and read more data than + # required. So only read as much data as can fit in buf. + if read + n > len(buf): + buf[read:] = self._rfile.read(len(buf) - read) + self._len -= len(buf) - read + read = len(buf) + else: + buf[read : read + n] = self._rfile.read(n) + self._len -= n + read += n + + if self._len == 0: + # Skip the terminating newline of a chunk that has been fully + # consumed. This also applies to the 0-sized final chunk + terminator = self._rfile.readline() + if terminator not in (b"\n", b"\r\n", b"\r"): + raise OSError("Missing chunk terminating newline") + + return read + + +class WSGIRequestHandler(BaseHTTPRequestHandler): + """A request handler that implements WSGI dispatching.""" + + server: "BaseWSGIServer" + + @property + def server_version(self) -> str: # type: ignore + from . import __version__ + + return f"Werkzeug/{__version__}" + + def make_environ(self) -> "WSGIEnvironment": + request_url = url_parse(self.path) + url_scheme = "http" if self.server.ssl_context is None else "https" + + if not self.client_address: + self.client_address = ("", 0) + elif isinstance(self.client_address, str): + self.client_address = (self.client_address, 0) + + # If there was no scheme but the path started with two slashes, + # the first segment may have been incorrectly parsed as the + # netloc, prepend it to the path again. 
+ if not request_url.scheme and request_url.netloc: + path_info = f"/{request_url.netloc}{request_url.path}" + else: + path_info = request_url.path + + path_info = url_unquote(path_info) + + environ: "WSGIEnvironment" = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": url_scheme, + "wsgi.input": self.rfile, + "wsgi.errors": sys.stderr, + "wsgi.multithread": self.server.multithread, + "wsgi.multiprocess": self.server.multiprocess, + "wsgi.run_once": False, + "werkzeug.socket": self.connection, + "SERVER_SOFTWARE": self.server_version, + "REQUEST_METHOD": self.command, + "SCRIPT_NAME": "", + "PATH_INFO": _wsgi_encoding_dance(path_info), + "QUERY_STRING": _wsgi_encoding_dance(request_url.query), + # Non-standard, added by mod_wsgi, uWSGI + "REQUEST_URI": _wsgi_encoding_dance(self.path), + # Non-standard, added by gunicorn + "RAW_URI": _wsgi_encoding_dance(self.path), + "REMOTE_ADDR": self.address_string(), + "REMOTE_PORT": self.port_integer(), + "SERVER_NAME": self.server.server_address[0], + "SERVER_PORT": str(self.server.server_address[1]), + "SERVER_PROTOCOL": self.request_version, + } + + for key, value in self.headers.items(): + key = key.upper().replace("-", "_") + value = value.replace("\r\n", "") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = f"HTTP_{key}" + if key in environ: + value = f"{environ[key]},{value}" + environ[key] = value + + if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked": + environ["wsgi.input_terminated"] = True + environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"]) + + # Per RFC 2616, if the URL is absolute, use that as the host. + # We're using "has a scheme" to indicate an absolute URL. + if request_url.scheme and request_url.netloc: + environ["HTTP_HOST"] = request_url.netloc + + try: + # binary_form=False gives nicer information, but wouldn't be compatible with + # what Nginx or Apache could return. + peer_cert = self.connection.getpeercert( # type: ignore[attr-defined] + binary_form=True + ) + if peer_cert is not None: + # Nginx and Apache use PEM format. + environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert) + except ValueError: + # SSL handshake hasn't finished. + self.server.log("error", "Cannot fetch SSL peer certificate info") + except AttributeError: + # Not using TLS, the socket will not have getpeercert(). + pass + + return environ + + def run_wsgi(self) -> None: + if self.headers.get("Expect", "").lower().strip() == "100-continue": + self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n") + + self.environ = environ = self.make_environ() + status_set: t.Optional[str] = None + headers_set: t.Optional[t.List[t.Tuple[str, str]]] = None + status_sent: t.Optional[str] = None + headers_sent: t.Optional[t.List[t.Tuple[str, str]]] = None + chunk_response: bool = False + + def write(data: bytes) -> None: + nonlocal status_sent, headers_sent, chunk_response + assert status_set is not None, "write() before start_response" + assert headers_set is not None, "write() before start_response" + if status_sent is None: + status_sent = status_set + headers_sent = headers_set + try: + code_str, msg = status_sent.split(None, 1) + except ValueError: + code_str, msg = status_sent, "" + code = int(code_str) + self.send_response(code, msg) + header_keys = set() + for key, value in headers_sent: + self.send_header(key, value) + header_keys.add(key.lower()) + + # Use chunked transfer encoding if there is no content + # length. Do not use for 1xx and 204 responses. 
304 + # responses and HEAD requests are also excluded, which + # is the more conservative behavior and matches other + # parts of the code. + # https://httpwg.org/specs/rfc7230.html#rfc.section.3.3.1 + if ( + not ( + "content-length" in header_keys + or environ["REQUEST_METHOD"] == "HEAD" + or (100 <= code < 200) + or code in {204, 304} + ) + and self.protocol_version >= "HTTP/1.1" + ): + chunk_response = True + self.send_header("Transfer-Encoding", "chunked") + + # Always close the connection. This disables HTTP/1.1 + # keep-alive connections. They aren't handled well by + # Python's http.server because it doesn't know how to + # drain the stream before the next request line. + self.send_header("Connection", "close") + self.end_headers() + + assert isinstance(data, bytes), "applications must write bytes" + + if data: + if chunk_response: + self.wfile.write(hex(len(data))[2:].encode()) + self.wfile.write(b"\r\n") + + self.wfile.write(data) + + if chunk_response: + self.wfile.write(b"\r\n") + + self.wfile.flush() + + def start_response(status, headers, exc_info=None): # type: ignore + nonlocal status_set, headers_set + if exc_info: + try: + if headers_sent: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + elif headers_set: + raise AssertionError("Headers already set") + status_set = status + headers_set = headers + return write + + def execute(app: "WSGIApplication") -> None: + application_iter = app(environ, start_response) + try: + for data in application_iter: + write(data) + if not headers_sent: + write(b"") + if chunk_response: + self.wfile.write(b"0\r\n\r\n") + finally: + if hasattr(application_iter, "close"): + application_iter.close() # type: ignore + + try: + execute(self.server.app) + except (ConnectionError, socket.timeout) as e: + self.connection_dropped(e, environ) + except Exception as e: + if self.server.passthrough_errors: + raise + + if status_sent is not None and chunk_response: + self.close_connection = True + + try: + # if we haven't yet sent the headers but they are set + # we roll back to be able to set them again. + if status_sent is None: + status_set = None + headers_set = None + execute(InternalServerError()) + except Exception: + pass + + from .debug.tbtools import DebugTraceback + + msg = DebugTraceback(e).render_traceback_text() + self.server.log("error", f"Error on request:\n{msg}") + + def handle(self) -> None: + """Handles a request ignoring dropped connections.""" + try: + super().handle() + except (ConnectionError, socket.timeout) as e: + self.connection_dropped(e) + except Exception as e: + if self.server.ssl_context is not None and is_ssl_error(e): + self.log_error("SSL error occurred: %s", e) + else: + raise + + def connection_dropped( + self, error: BaseException, environ: t.Optional["WSGIEnvironment"] = None + ) -> None: + """Called if the connection was closed by the client. By default + nothing happens. + """ + + def __getattr__(self, name: str) -> t.Any: + # All HTTP methods are handled by run_wsgi. + if name.startswith("do_"): + return self.run_wsgi + + # All other attributes are forwarded to the base class. 
+ return getattr(super(), name) + + def address_string(self) -> str: + if getattr(self, "environ", None): + return self.environ["REMOTE_ADDR"] # type: ignore + + if not self.client_address: + return "" + + return self.client_address[0] + + def port_integer(self) -> int: + return self.client_address[1] + + def log_request( + self, code: t.Union[int, str] = "-", size: t.Union[int, str] = "-" + ) -> None: + try: + path = uri_to_iri(self.path) + msg = f"{self.command} {path} {self.request_version}" + except AttributeError: + # path isn't set if the requestline was bad + msg = self.requestline + + code = str(code) + + if code[0] == "1": # 1xx - Informational + msg = _ansi_style(msg, "bold") + elif code == "200": # 2xx - Success + pass + elif code == "304": # 304 - Resource Not Modified + msg = _ansi_style(msg, "cyan") + elif code[0] == "3": # 3xx - Redirection + msg = _ansi_style(msg, "green") + elif code == "404": # 404 - Resource Not Found + msg = _ansi_style(msg, "yellow") + elif code[0] == "4": # 4xx - Client Error + msg = _ansi_style(msg, "bold", "red") + else: # 5xx, or any other response + msg = _ansi_style(msg, "bold", "magenta") + + self.log("info", '"%s" %s %s', msg, code, size) + + def log_error(self, format: str, *args: t.Any) -> None: + self.log("error", format, *args) + + def log_message(self, format: str, *args: t.Any) -> None: + self.log("info", format, *args) + + def log(self, type: str, message: str, *args: t.Any) -> None: + _log( + type, + f"{self.address_string()} - - [{self.log_date_time_string()}] {message}\n", + *args, + ) + + +def _ansi_style(value: str, *styles: str) -> str: + if not _log_add_style: + return value + + codes = { + "bold": 1, + "red": 31, + "green": 32, + "yellow": 33, + "magenta": 35, + "cyan": 36, + } + + for style in styles: + value = f"\x1b[{codes[style]}m{value}" + + return f"{value}\x1b[0m" + + +def generate_adhoc_ssl_pair( + cn: t.Optional[str] = None, +) -> t.Tuple["Certificate", "RSAPrivateKeyWithSerialization"]: + try: + from cryptography import x509 + from cryptography.x509.oid import NameOID + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.asymmetric import rsa + except ImportError: + raise TypeError( + "Using ad-hoc certificates requires the cryptography library." + ) from None + + backend = default_backend() + pkey = rsa.generate_private_key( + public_exponent=65537, key_size=2048, backend=backend + ) + + # pretty damn sure that this is not actually accepted by anyone + if cn is None: + cn = "*" + + subject = x509.Name( + [ + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Dummy Certificate"), + x509.NameAttribute(NameOID.COMMON_NAME, cn), + ] + ) + + backend = default_backend() + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(subject) + .public_key(pkey.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(dt.now(timezone.utc)) + .not_valid_after(dt.now(timezone.utc) + timedelta(days=365)) + .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False) + .add_extension(x509.SubjectAlternativeName([x509.DNSName(cn)]), critical=False) + .sign(pkey, hashes.SHA256(), backend) + ) + return cert, pkey + + +def make_ssl_devcert( + base_path: str, host: t.Optional[str] = None, cn: t.Optional[str] = None +) -> t.Tuple[str, str]: + """Creates an SSL key for development. This should be used instead of + the ``'adhoc'`` key which generates a new cert on each server start. 
+ It accepts a path for where it should store the key and cert and + either a host or CN. If a host is given it will use the CN + ``*.host/CN=host``. + + For more information see :func:`run_simple`. + + .. versionadded:: 0.9 + + :param base_path: the path to the certificate and key. The extension + ``.crt`` is added for the certificate, ``.key`` is + added for the key. + :param host: the name of the host. This can be used as an alternative + for the `cn`. + :param cn: the `CN` to use. + """ + + if host is not None: + cn = f"*.{host}/CN={host}" + cert, pkey = generate_adhoc_ssl_pair(cn=cn) + + from cryptography.hazmat.primitives import serialization + + cert_file = f"{base_path}.crt" + pkey_file = f"{base_path}.key" + + with open(cert_file, "wb") as f: + f.write(cert.public_bytes(serialization.Encoding.PEM)) + with open(pkey_file, "wb") as f: + f.write( + pkey.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return cert_file, pkey_file + + +def generate_adhoc_ssl_context() -> "ssl.SSLContext": + """Generates an adhoc SSL context for the development server.""" + import tempfile + import atexit + + cert, pkey = generate_adhoc_ssl_pair() + + from cryptography.hazmat.primitives import serialization + + cert_handle, cert_file = tempfile.mkstemp() + pkey_handle, pkey_file = tempfile.mkstemp() + atexit.register(os.remove, pkey_file) + atexit.register(os.remove, cert_file) + + os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM)) + os.write( + pkey_handle, + pkey.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ), + ) + + os.close(cert_handle) + os.close(pkey_handle) + ctx = load_ssl_context(cert_file, pkey_file) + return ctx + + +def load_ssl_context( + cert_file: str, pkey_file: t.Optional[str] = None, protocol: t.Optional[int] = None +) -> "ssl.SSLContext": + """Loads SSL context from cert/private key files and optional protocol. + Many parameters are directly taken from the API of + :py:class:`ssl.SSLContext`. + + :param cert_file: Path of the certificate to use. + :param pkey_file: Path of the private key to use. If not given, the key + will be obtained from the certificate file. + :param protocol: A ``PROTOCOL`` constant from the :mod:`ssl` module. + Defaults to :data:`ssl.PROTOCOL_TLS_SERVER`. 
+ """ + if protocol is None: + protocol = ssl.PROTOCOL_TLS_SERVER + + ctx = ssl.SSLContext(protocol) + ctx.load_cert_chain(cert_file, pkey_file) + return ctx + + +def is_ssl_error(error: t.Optional[Exception] = None) -> bool: + """Checks if the given error (or the current one) is an SSL error.""" + if error is None: + error = t.cast(Exception, sys.exc_info()[1]) + return isinstance(error, ssl.SSLError) + + +def select_address_family(host: str, port: int) -> socket.AddressFamily: + """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on + the host and port.""" + if host.startswith("unix://"): + return socket.AF_UNIX + elif ":" in host and hasattr(socket, "AF_INET6"): + return socket.AF_INET6 + return socket.AF_INET + + +def get_sockaddr( + host: str, port: int, family: socket.AddressFamily +) -> t.Union[t.Tuple[str, int], str]: + """Return a fully qualified socket address that can be passed to + :func:`socket.bind`.""" + if family == af_unix: + return host.split("://", 1)[1] + try: + res = socket.getaddrinfo( + host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP + ) + except socket.gaierror: + return host, port + return res[0][4] # type: ignore + + +def get_interface_ip(family: socket.AddressFamily) -> str: + """Get the IP address of an external interface. Used when binding to + 0.0.0.0 or ::1 to show a more useful URL. + + :meta private: + """ + # arbitrary private address + host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219" + + with socket.socket(family, socket.SOCK_DGRAM) as s: + try: + s.connect((host, 58162)) + except OSError: + return "::1" if family == socket.AF_INET6 else "127.0.0.1" + + return s.getsockname()[0] # type: ignore + + +class BaseWSGIServer(HTTPServer): + """A WSGI server that that handles one request at a time. + + Use :func:`make_server` to create a server instance. + """ + + multithread = False + multiprocess = False + request_queue_size = LISTEN_QUEUE + + def __init__( + self, + host: str, + port: int, + app: "WSGIApplication", + handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, + fd: t.Optional[int] = None, + ) -> None: + if handler is None: + handler = WSGIRequestHandler + + # If the handler doesn't directly set a protocol version and + # thread or process workers are used, then allow chunked + # responses and keep-alive connections by enabling HTTP/1.1. + if "protocol_version" not in vars(handler) and ( + self.multithread or self.multiprocess + ): + handler.protocol_version = "HTTP/1.1" + + self.host = host + self.port = port + self.app = app + self.passthrough_errors = passthrough_errors + + self.address_family = address_family = select_address_family(host, port) + server_address = get_sockaddr(host, int(port), address_family) + + # Remove a leftover Unix socket file from a previous run. Don't + # remove a file that was set up by run_simple. + if address_family == af_unix and fd is None: + server_address = t.cast(str, server_address) + + if os.path.exists(server_address): + os.unlink(server_address) + + # Bind and activate will be handled manually, it should only + # happen if we're not using a socket that was already set up. + super().__init__( + server_address, # type: ignore[arg-type] + handler, + bind_and_activate=False, + ) + + if fd is None: + # No existing socket descriptor, do bind_and_activate=True. 
+ try: + self.server_bind() + self.server_activate() + except BaseException: + self.server_close() + raise + else: + # Use the passed in socket directly. + self.socket = socket.fromfd(fd, address_family, socket.SOCK_STREAM) + self.server_address = self.socket.getsockname() + + if address_family != af_unix: + # If port was 0, this will record the bound port. + self.port = self.server_address[1] + + if ssl_context is not None: + if isinstance(ssl_context, tuple): + ssl_context = load_ssl_context(*ssl_context) + elif ssl_context == "adhoc": + ssl_context = generate_adhoc_ssl_context() + + self.socket = ssl_context.wrap_socket(self.socket, server_side=True) + self.ssl_context: t.Optional["ssl.SSLContext"] = ssl_context + else: + self.ssl_context = None + + def log(self, type: str, message: str, *args: t.Any) -> None: + _log(type, message, *args) + + def serve_forever(self, poll_interval: float = 0.5) -> None: + try: + super().serve_forever(poll_interval=poll_interval) + except KeyboardInterrupt: + pass + finally: + self.server_close() + + def handle_error( + self, request: t.Any, client_address: t.Union[t.Tuple[str, int], str] + ) -> None: + if self.passthrough_errors: + raise + + return super().handle_error(request, client_address) + + def log_startup(self) -> None: + """Show information about the address when starting the server.""" + dev_warning = ( + "WARNING: This is a development server. Do not use it in a production" + " deployment. Use a production WSGI server instead." + ) + dev_warning = _ansi_style(dev_warning, "bold", "red") + messages = [dev_warning] + + if self.address_family == af_unix: + messages.append(f" * Running on {self.host}") + else: + scheme = "http" if self.ssl_context is None else "https" + display_hostname = self.host + + if self.host in {"0.0.0.0", "::"}: + messages.append(f" * Running on all addresses ({self.host})") + + if self.host == "0.0.0.0": + localhost = "127.0.0.1" + display_hostname = get_interface_ip(socket.AF_INET) + else: + localhost = "[::1]" + display_hostname = get_interface_ip(socket.AF_INET6) + + messages.append(f" * Running on {scheme}://{localhost}:{self.port}") + + if ":" in display_hostname: + display_hostname = f"[{display_hostname}]" + + messages.append(f" * Running on {scheme}://{display_hostname}:{self.port}") + + _log("info", "\n".join(messages)) + + +class ThreadedWSGIServer(socketserver.ThreadingMixIn, BaseWSGIServer): + """A WSGI server that handles concurrent requests in separate + threads. + + Use :func:`make_server` to create a server instance. + """ + + multithread = True + daemon_threads = True + + +class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): + """A WSGI server that handles concurrent requests in separate forked + processes. + + Use :func:`make_server` to create a server instance. 
+ """ + + multiprocess = True + + def __init__( + self, + host: str, + port: int, + app: "WSGIApplication", + processes: int = 40, + handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, + fd: t.Optional[int] = None, + ) -> None: + if not can_fork: + raise ValueError("Your platform does not support forking.") + + super().__init__(host, port, app, handler, passthrough_errors, ssl_context, fd) + self.max_children = processes + + +def make_server( + host: str, + port: int, + app: "WSGIApplication", + threaded: bool = False, + processes: int = 1, + request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, + fd: t.Optional[int] = None, +) -> BaseWSGIServer: + """Create an appropriate WSGI server instance based on the value of + ``threaded`` and ``processes``. + + This is called from :func:`run_simple`, but can be used separately + to have access to the server object, such as to run it in a separate + thread. + + See :func:`run_simple` for parameter docs. + """ + if threaded and processes > 1: + raise ValueError("Cannot have a multi-thread and multi-process server.") + + if threaded: + return ThreadedWSGIServer( + host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd + ) + + if processes > 1: + return ForkingWSGIServer( + host, + port, + app, + processes, + request_handler, + passthrough_errors, + ssl_context, + fd=fd, + ) + + return BaseWSGIServer( + host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd + ) + + +def is_running_from_reloader() -> bool: + """Check if the server is running as a subprocess within the + Werkzeug reloader. + + .. versionadded:: 0.10 + """ + return os.environ.get("WERKZEUG_RUN_MAIN") == "true" + + +def prepare_socket(hostname: str, port: int) -> socket.socket: + """Prepare a socket for use by the WSGI server and reloader. + + The socket is marked inheritable so that it can be kept across + reloads instead of breaking connections. + + Catch errors during bind and show simpler error messages. For + "address already in use", show instructions for resolving the issue, + with special instructions for macOS. + + This is called from :func:`run_simple`, but can be used separately + to control server creation with :func:`make_server`. + """ + address_family = select_address_family(hostname, port) + server_address = get_sockaddr(hostname, port, address_family) + s = socket.socket(address_family, socket.SOCK_STREAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.set_inheritable(True) + + # Remove the socket file if it already exists. + if address_family == af_unix: + server_address = t.cast(str, server_address) + + if os.path.exists(server_address): + os.unlink(server_address) + + # Catch connection issues and show them without the traceback. Show + # extra instructions for address not found, and for macOS. + try: + s.bind(server_address) + except OSError as e: + print(e.strerror, file=sys.stderr) + + if e.errno == errno.EADDRINUSE: + print( + f"Port {port} is in use by another program. 
Either" + " identify and stop that program, or start the" + " server with a different port.", + file=sys.stderr, + ) + + if sys.platform == "darwin" and port == 5000: + print( + "On macOS, try disabling the 'AirPlay Receiver'" + " service from System Preferences -> Sharing.", + file=sys.stderr, + ) + + sys.exit(1) + + s.listen(LISTEN_QUEUE) + return s + + +def run_simple( + hostname: str, + port: int, + application: "WSGIApplication", + use_reloader: bool = False, + use_debugger: bool = False, + use_evalex: bool = True, + extra_files: t.Optional[t.Iterable[str]] = None, + exclude_patterns: t.Optional[t.Iterable[str]] = None, + reloader_interval: int = 1, + reloader_type: str = "auto", + threaded: bool = False, + processes: int = 1, + request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + static_files: t.Optional[t.Dict[str, t.Union[str, t.Tuple[str, str]]]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, +) -> None: + """Start a development server for a WSGI application. Various + optional features can be enabled. + + .. warning:: + + Do not use the development server when deploying to production. + It is intended for use only during local development. It is not + designed to be particularly efficient, stable, or secure. + + :param hostname: The host to bind to, for example ``'localhost'``. + Can be a domain, IPv4 or IPv6 address, or file path starting + with ``unix://`` for a Unix socket. + :param port: The port to bind to, for example ``8080``. Using ``0`` + tells the OS to pick a random free port. + :param application: The WSGI application to run. + :param use_reloader: Use a reloader process to restart the server + process when files are changed. + :param use_debugger: Use Werkzeug's debugger, which will show + formatted tracebacks on unhandled exceptions. + :param use_evalex: Make the debugger interactive. A Python terminal + can be opened for any frame in the traceback. Some protection is + provided by requiring a PIN, but this should never be enabled + on a publicly visible server. + :param extra_files: The reloader will watch these files for changes + in addition to Python modules. For example, watch a + configuration file. + :param exclude_patterns: The reloader will ignore changes to any + files matching these :mod:`fnmatch` patterns. For example, + ignore cache files. + :param reloader_interval: How often the reloader tries to check for + changes. + :param reloader_type: The reloader to use. The ``'stat'`` reloader + is built in, but may require significant CPU to watch files. The + ``'watchdog'`` reloader is much more efficient but requires + installing the ``watchdog`` package first. + :param threaded: Handle concurrent requests using threads. Cannot be + used with ``processes``. + :param processes: Handle concurrent requests using up to this number + of processes. Cannot be used with ``threaded``. + :param request_handler: Use a different + :class:`~BaseHTTPServer.BaseHTTPRequestHandler` subclass to + handle requests. + :param static_files: A dict mapping URL prefixes to directories to + serve static files from using + :class:`~werkzeug.middleware.SharedDataMiddleware`. + :param passthrough_errors: Don't catch unhandled exceptions at the + server level, let the serve crash instead. If ``use_debugger`` + is enabled, the debugger will still catch such errors. + :param ssl_context: Configure TLS to serve over HTTPS. 
Can be an + :class:`ssl.SSLContext` object, a ``(cert_file, key_file)`` + tuple to create a typical context, or the string ``'adhoc'`` to + generate a temporary self-signed certificate. + + .. versionchanged:: 2.1 + Instructions are shown for dealing with an "address already in + use" error. + + .. versionchanged:: 2.1 + Running on ``0.0.0.0`` or ``::`` shows the loopback IP in + addition to a real IP. + + .. versionchanged:: 2.1 + The command-line interface was removed. + + .. versionchanged:: 2.0 + Running on ``0.0.0.0`` or ``::`` shows a real IP address that + was bound as well as a warning not to run the development server + in production. + + .. versionchanged:: 2.0 + The ``exclude_patterns`` parameter was added. + + .. versionchanged:: 0.15 + Bind to a Unix socket by passing a ``hostname`` that starts with + ``unix://``. + + .. versionchanged:: 0.10 + Improved the reloader and added support for changing the backend + through the ``reloader_type`` parameter. + + .. versionchanged:: 0.9 + A command-line interface was added. + + .. versionchanged:: 0.8 + ``ssl_context`` can be a tuple of paths to the certificate and + private key files. + + .. versionchanged:: 0.6 + The ``ssl_context`` parameter was added. + + .. versionchanged:: 0.5 + The ``static_files`` and ``passthrough_errors`` parameters were + added. + """ + if not isinstance(port, int): + raise TypeError("port must be an integer") + + if static_files: + from .middleware.shared_data import SharedDataMiddleware + + application = SharedDataMiddleware(application, static_files) + + if use_debugger: + from .debug import DebuggedApplication + + application = DebuggedApplication(application, evalex=use_evalex) + + if not is_running_from_reloader(): + s = prepare_socket(hostname, port) + fd = s.fileno() + # Silence a ResourceWarning about an unclosed socket. This object is no longer + # used, the server will create another with fromfd. 
+ s.detach() + os.environ["WERKZEUG_SERVER_FD"] = str(fd) + else: + fd = int(os.environ["WERKZEUG_SERVER_FD"]) + + srv = make_server( + hostname, + port, + application, + threaded, + processes, + request_handler, + passthrough_errors, + ssl_context, + fd=fd, + ) + + if not is_running_from_reloader(): + srv.log_startup() + _log("info", _ansi_style("Press CTRL+C to quit", "yellow")) + + if use_reloader: + from ._reloader import run_with_reloader + + run_with_reloader( + srv.serve_forever, + extra_files=extra_files, + exclude_patterns=exclude_patterns, + interval=reloader_interval, + reloader_type=reloader_type, + ) + else: + srv.serve_forever() diff --git a/venv/lib/python3.8/site-packages/werkzeug/test.py b/venv/lib/python3.8/site-packages/werkzeug/test.py new file mode 100644 index 0000000..edb4d4a --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/test.py @@ -0,0 +1,1337 @@ +import mimetypes +import sys +import typing as t +from collections import defaultdict +from datetime import datetime +from datetime import timedelta +from http.cookiejar import CookieJar +from io import BytesIO +from itertools import chain +from random import random +from tempfile import TemporaryFile +from time import time +from urllib.request import Request as _UrllibRequest + +from ._internal import _get_environ +from ._internal import _make_encode_wrapper +from ._internal import _wsgi_decoding_dance +from ._internal import _wsgi_encoding_dance +from .datastructures import Authorization +from .datastructures import CallbackDict +from .datastructures import CombinedMultiDict +from .datastructures import EnvironHeaders +from .datastructures import FileMultiDict +from .datastructures import Headers +from .datastructures import MultiDict +from .http import dump_cookie +from .http import dump_options_header +from .http import parse_options_header +from .sansio.multipart import Data +from .sansio.multipart import Epilogue +from .sansio.multipart import Field +from .sansio.multipart import File +from .sansio.multipart import MultipartEncoder +from .sansio.multipart import Preamble +from .urls import iri_to_uri +from .urls import url_encode +from .urls import url_fix +from .urls import url_parse +from .urls import url_unparse +from .urls import url_unquote +from .utils import cached_property +from .utils import get_content_type +from .wrappers.request import Request +from .wrappers.response import Response +from .wsgi import ClosingIterator +from .wsgi import get_current_url + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +def stream_encode_multipart( + data: t.Mapping[str, t.Any], + use_tempfile: bool = True, + threshold: int = 1024 * 500, + boundary: t.Optional[str] = None, + charset: str = "utf-8", +) -> t.Tuple[t.IO[bytes], int, str]: + """Encode a dict of values (either strings or file descriptors or + :class:`FileStorage` objects.) into a multipart encoded string stored + in a file descriptor. 
+ """ + if boundary is None: + boundary = f"---------------WerkzeugFormPart_{time()}{random()}" + + stream: t.IO[bytes] = BytesIO() + total_length = 0 + on_disk = False + write_binary: t.Callable[[bytes], int] + + if use_tempfile: + + def write_binary(s: bytes) -> int: + nonlocal stream, total_length, on_disk + + if on_disk: + return stream.write(s) + else: + length = len(s) + + if length + total_length <= threshold: + stream.write(s) + else: + new_stream = t.cast(t.IO[bytes], TemporaryFile("wb+")) + new_stream.write(stream.getvalue()) # type: ignore + new_stream.write(s) + stream = new_stream + on_disk = True + + total_length += length + return length + + else: + write_binary = stream.write + + encoder = MultipartEncoder(boundary.encode()) + write_binary(encoder.send_event(Preamble(data=b""))) + for key, value in _iter_data(data): + reader = getattr(value, "read", None) + if reader is not None: + filename = getattr(value, "filename", getattr(value, "name", None)) + content_type = getattr(value, "content_type", None) + if content_type is None: + content_type = ( + filename + and mimetypes.guess_type(filename)[0] + or "application/octet-stream" + ) + headers = Headers([("Content-Type", content_type)]) + if filename is None: + write_binary(encoder.send_event(Field(name=key, headers=headers))) + else: + write_binary( + encoder.send_event( + File(name=key, filename=filename, headers=headers) + ) + ) + while True: + chunk = reader(16384) + + if not chunk: + break + + write_binary(encoder.send_event(Data(data=chunk, more_data=True))) + else: + if not isinstance(value, str): + value = str(value) + write_binary(encoder.send_event(Field(name=key, headers=Headers()))) + write_binary( + encoder.send_event(Data(data=value.encode(charset), more_data=False)) + ) + + write_binary(encoder.send_event(Epilogue(data=b""))) + + length = stream.tell() + stream.seek(0) + return stream, length, boundary + + +def encode_multipart( + values: t.Mapping[str, t.Any], + boundary: t.Optional[str] = None, + charset: str = "utf-8", +) -> t.Tuple[str, bytes]: + """Like `stream_encode_multipart` but returns a tuple in the form + (``boundary``, ``data``) where data is bytes. + """ + stream, length, boundary = stream_encode_multipart( + values, use_tempfile=False, boundary=boundary, charset=charset + ) + return boundary, stream.read() + + +class _TestCookieHeaders: + """A headers adapter for cookielib""" + + def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None: + self.headers = headers + + def getheaders(self, name: str) -> t.Iterable[str]: + headers = [] + name = name.lower() + for k, v in self.headers: + if k.lower() == name: + headers.append(v) + return headers + + def get_all( + self, name: str, default: t.Optional[t.Iterable[str]] = None + ) -> t.Iterable[str]: + headers = self.getheaders(name) + + if not headers: + return default # type: ignore + + return headers + + +class _TestCookieResponse: + """Something that looks like a httplib.HTTPResponse, but is actually just an + adapter for our test responses to make them available for cookielib. + """ + + def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None: + self.headers = _TestCookieHeaders(headers) + + def info(self) -> _TestCookieHeaders: + return self.headers + + +class _TestCookieJar(CookieJar): + """A cookielib.CookieJar modified to inject and read cookie headers from + and to wsgi environments, and wsgi application responses. 
+ """ + + def inject_wsgi(self, environ: "WSGIEnvironment") -> None: + """Inject the cookies as client headers into the server's wsgi + environment. + """ + cvals = [f"{c.name}={c.value}" for c in self] + + if cvals: + environ["HTTP_COOKIE"] = "; ".join(cvals) + else: + environ.pop("HTTP_COOKIE", None) + + def extract_wsgi( + self, + environ: "WSGIEnvironment", + headers: t.Union[Headers, t.List[t.Tuple[str, str]]], + ) -> None: + """Extract the server's set-cookie headers as cookies into the + cookie jar. + """ + self.extract_cookies( + _TestCookieResponse(headers), # type: ignore + _UrllibRequest(get_current_url(environ)), + ) + + +def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[t.Tuple[str, t.Any]]: + """Iterate over a mapping that might have a list of values, yielding + all key, value pairs. Almost like iter_multi_items but only allows + lists, not tuples, of values so tuples can be used for files. + """ + if isinstance(data, MultiDict): + yield from data.items(multi=True) + else: + for key, value in data.items(): + if isinstance(value, list): + for v in value: + yield key, v + else: + yield key, value + + +_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict) + + +class EnvironBuilder: + """This class can be used to conveniently create a WSGI environment + for testing purposes. It can be used to quickly create WSGI environments + or request objects from arbitrary data. + + The signature of this class is also used in some other places as of + Werkzeug 0.5 (:func:`create_environ`, :meth:`Response.from_values`, + :meth:`Client.open`). Because of this most of the functionality is + available through the constructor alone. + + Files and regular form data can be manipulated independently of each + other with the :attr:`form` and :attr:`files` attributes, but are + passed with the same argument to the constructor: `data`. + + `data` can be any of these values: + + - a `str` or `bytes` object: The object is converted into an + :attr:`input_stream`, the :attr:`content_length` is set and you have to + provide a :attr:`content_type`. + - a `dict` or :class:`MultiDict`: The keys have to be strings. The values + have to be either any of the following objects, or a list of any of the + following objects: + + - a :class:`file`-like object: These are converted into + :class:`FileStorage` objects automatically. + - a `tuple`: The :meth:`~FileMultiDict.add_file` method is called + with the key and the unpacked `tuple` items as positional + arguments. + - a `str`: The string is set as form data for the associated key. + - a file-like object: The object content is loaded in memory and then + handled like a regular `str` or a `bytes`. + + :param path: the path of the request. In the WSGI environment this will + end up as `PATH_INFO`. If the `query_string` is not defined + and there is a question mark in the `path` everything after + it is used as query string. + :param base_url: the base URL is a URL that is used to extract the WSGI + URL scheme, host (server name + server port) and the + script root (`SCRIPT_NAME`). + :param query_string: an optional string or dict with URL parameters. + :param method: the HTTP method to use, defaults to `GET`. + :param input_stream: an optional input stream. Do not specify this and + `data`. As soon as an input stream is set you can't + modify :attr:`args` and :attr:`files` unless you + set the :attr:`input_stream` to `None` again. + :param content_type: The content type for the request. 
As of 0.5 you + don't have to provide this when specifying files + and form data via `data`. + :param content_length: The content length for the request. You don't + have to specify this when providing data via + `data`. + :param errors_stream: an optional error stream that is used for + `wsgi.errors`. Defaults to :data:`stderr`. + :param multithread: controls `wsgi.multithread`. Defaults to `False`. + :param multiprocess: controls `wsgi.multiprocess`. Defaults to `False`. + :param run_once: controls `wsgi.run_once`. Defaults to `False`. + :param headers: an optional list or :class:`Headers` object of headers. + :param data: a string or dict of form data or a file-object. + See explanation above. + :param json: An object to be serialized and assigned to ``data``. + Defaults the content type to ``"application/json"``. + Serialized with the function assigned to :attr:`json_dumps`. + :param environ_base: an optional dict of environment defaults. + :param environ_overrides: an optional dict of environment overrides. + :param charset: the charset used to encode string data. + :param auth: An authorization object to use for the + ``Authorization`` header value. A ``(username, password)`` tuple + is a shortcut for ``Basic`` authorization. + + .. versionchanged:: 2.1 + ``CONTENT_TYPE`` and ``CONTENT_LENGTH`` are not duplicated as + header keys in the environ. + + .. versionchanged:: 2.0 + ``REQUEST_URI`` and ``RAW_URI`` is the full raw URI including + the query string, not only the path. + + .. versionchanged:: 2.0 + The default :attr:`request_class` is ``Request`` instead of + ``BaseRequest``. + + .. versionadded:: 2.0 + Added the ``auth`` parameter. + + .. versionadded:: 0.15 + The ``json`` param and :meth:`json_dumps` method. + + .. versionadded:: 0.15 + The environ has keys ``REQUEST_URI`` and ``RAW_URI`` containing + the path before percent-decoding. This is not part of the WSGI + PEP, but many WSGI servers include it. + + .. versionchanged:: 0.6 + ``path`` and ``base_url`` can now be unicode strings that are + encoded with :func:`iri_to_uri`. + """ + + #: the server protocol to use. defaults to HTTP/1.1 + server_protocol = "HTTP/1.1" + + #: the wsgi version to use. defaults to (1, 0) + wsgi_version = (1, 0) + + #: The default request class used by :meth:`get_request`. + request_class = Request + + import json + + #: The serialization function used when ``json`` is passed. 
+ json_dumps = staticmethod(json.dumps) + del json + + _args: t.Optional[MultiDict] + _query_string: t.Optional[str] + _input_stream: t.Optional[t.IO[bytes]] + _form: t.Optional[MultiDict] + _files: t.Optional[FileMultiDict] + + def __init__( + self, + path: str = "/", + base_url: t.Optional[str] = None, + query_string: t.Optional[t.Union[t.Mapping[str, str], str]] = None, + method: str = "GET", + input_stream: t.Optional[t.IO[bytes]] = None, + content_type: t.Optional[str] = None, + content_length: t.Optional[int] = None, + errors_stream: t.Optional[t.IO[str]] = None, + multithread: bool = False, + multiprocess: bool = False, + run_once: bool = False, + headers: t.Optional[t.Union[Headers, t.Iterable[t.Tuple[str, str]]]] = None, + data: t.Optional[ + t.Union[t.IO[bytes], str, bytes, t.Mapping[str, t.Any]] + ] = None, + environ_base: t.Optional[t.Mapping[str, t.Any]] = None, + environ_overrides: t.Optional[t.Mapping[str, t.Any]] = None, + charset: str = "utf-8", + mimetype: t.Optional[str] = None, + json: t.Optional[t.Mapping[str, t.Any]] = None, + auth: t.Optional[t.Union[Authorization, t.Tuple[str, str]]] = None, + ) -> None: + path_s = _make_encode_wrapper(path) + if query_string is not None and path_s("?") in path: + raise ValueError("Query string is defined in the path and as an argument") + request_uri = url_parse(path) + if query_string is None and path_s("?") in path: + query_string = request_uri.query + self.charset = charset + self.path = iri_to_uri(request_uri.path) + self.request_uri = path + if base_url is not None: + base_url = url_fix(iri_to_uri(base_url, charset), charset) + self.base_url = base_url # type: ignore + if isinstance(query_string, (bytes, str)): + self.query_string = query_string + else: + if query_string is None: + query_string = MultiDict() + elif not isinstance(query_string, MultiDict): + query_string = MultiDict(query_string) + self.args = query_string + self.method = method + if headers is None: + headers = Headers() + elif not isinstance(headers, Headers): + headers = Headers(headers) + self.headers = headers + if content_type is not None: + self.content_type = content_type + if errors_stream is None: + errors_stream = sys.stderr + self.errors_stream = errors_stream + self.multithread = multithread + self.multiprocess = multiprocess + self.run_once = run_once + self.environ_base = environ_base + self.environ_overrides = environ_overrides + self.input_stream = input_stream + self.content_length = content_length + self.closed = False + + if auth is not None: + if isinstance(auth, tuple): + auth = Authorization( + "basic", {"username": auth[0], "password": auth[1]} + ) + + self.headers.set("Authorization", auth.to_header()) + + if json is not None: + if data is not None: + raise TypeError("can't provide both json and data") + + data = self.json_dumps(json) + + if self.content_type is None: + self.content_type = "application/json" + + if data: + if input_stream is not None: + raise TypeError("can't provide input stream and data") + if hasattr(data, "read"): + data = data.read() # type: ignore + if isinstance(data, str): + data = data.encode(self.charset) + if isinstance(data, bytes): + self.input_stream = BytesIO(data) + if self.content_length is None: + self.content_length = len(data) + else: + for key, value in _iter_data(data): # type: ignore + if isinstance(value, (tuple, dict)) or hasattr(value, "read"): + self._add_file_from_data(key, value) + else: + self.form.setlistdefault(key).append(value) + + if mimetype is not None: + self.mimetype = mimetype + + 
@classmethod + def from_environ( + cls, environ: "WSGIEnvironment", **kwargs: t.Any + ) -> "EnvironBuilder": + """Turn an environ dict back into a builder. Any extra kwargs + override the args extracted from the environ. + + .. versionchanged:: 2.0 + Path and query values are passed through the WSGI decoding + dance to avoid double encoding. + + .. versionadded:: 0.15 + """ + headers = Headers(EnvironHeaders(environ)) + out = { + "path": _wsgi_decoding_dance(environ["PATH_INFO"]), + "base_url": cls._make_base_url( + environ["wsgi.url_scheme"], + headers.pop("Host"), + _wsgi_decoding_dance(environ["SCRIPT_NAME"]), + ), + "query_string": _wsgi_decoding_dance(environ["QUERY_STRING"]), + "method": environ["REQUEST_METHOD"], + "input_stream": environ["wsgi.input"], + "content_type": headers.pop("Content-Type", None), + "content_length": headers.pop("Content-Length", None), + "errors_stream": environ["wsgi.errors"], + "multithread": environ["wsgi.multithread"], + "multiprocess": environ["wsgi.multiprocess"], + "run_once": environ["wsgi.run_once"], + "headers": headers, + } + out.update(kwargs) + return cls(**out) + + def _add_file_from_data( + self, + key: str, + value: t.Union[ + t.IO[bytes], t.Tuple[t.IO[bytes], str], t.Tuple[t.IO[bytes], str, str] + ], + ) -> None: + """Called in the EnvironBuilder to add files from the data dict.""" + if isinstance(value, tuple): + self.files.add_file(key, *value) + else: + self.files.add_file(key, value) + + @staticmethod + def _make_base_url(scheme: str, host: str, script_root: str) -> str: + return url_unparse((scheme, host, script_root, "", "")).rstrip("/") + "/" + + @property + def base_url(self) -> str: + """The base URL is used to extract the URL scheme, host name, + port, and root path. + """ + return self._make_base_url(self.url_scheme, self.host, self.script_root) + + @base_url.setter + def base_url(self, value: t.Optional[str]) -> None: + if value is None: + scheme = "http" + netloc = "localhost" + script_root = "" + else: + scheme, netloc, script_root, qs, anchor = url_parse(value) + if qs or anchor: + raise ValueError("base url must not contain a query string or fragment") + self.script_root = script_root.rstrip("/") + self.host = netloc + self.url_scheme = scheme + + @property + def content_type(self) -> t.Optional[str]: + """The content type for the request. Reflected from and to + the :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection. + """ + ct = self.headers.get("Content-Type") + if ct is None and not self._input_stream: + if self._files: + return "multipart/form-data" + if self._form: + return "application/x-www-form-urlencoded" + return None + return ct + + @content_type.setter + def content_type(self, value: t.Optional[str]) -> None: + if value is None: + self.headers.pop("Content-Type", None) + else: + self.headers["Content-Type"] = value + + @property + def mimetype(self) -> t.Optional[str]: + """The mimetype (content type without charset etc.) + + .. versionadded:: 0.14 + """ + ct = self.content_type + return ct.split(";")[0].strip() if ct else None + + @mimetype.setter + def mimetype(self, value: str) -> None: + self.content_type = get_content_type(value, self.charset) + + @property + def mimetype_params(self) -> t.Mapping[str, str]: + """The mimetype parameters as dict. For example if the + content type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. 
versionadded:: 0.14 + """ + + def on_update(d: CallbackDict) -> None: + self.headers["Content-Type"] = dump_options_header(self.mimetype, d) + + d = parse_options_header(self.headers.get("content-type", ""))[1] + return CallbackDict(d, on_update) + + @property + def content_length(self) -> t.Optional[int]: + """The content length as integer. Reflected from and to the + :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection. + """ + return self.headers.get("Content-Length", type=int) + + @content_length.setter + def content_length(self, value: t.Optional[int]) -> None: + if value is None: + self.headers.pop("Content-Length", None) + else: + self.headers["Content-Length"] = str(value) + + def _get_form(self, name: str, storage: t.Type[_TAnyMultiDict]) -> _TAnyMultiDict: + """Common behavior for getting the :attr:`form` and + :attr:`files` properties. + + :param name: Name of the internal cached attribute. + :param storage: Storage class used for the data. + """ + if self.input_stream is not None: + raise AttributeError("an input stream is defined") + + rv = getattr(self, name) + + if rv is None: + rv = storage() + setattr(self, name, rv) + + return rv # type: ignore + + def _set_form(self, name: str, value: MultiDict) -> None: + """Common behavior for setting the :attr:`form` and + :attr:`files` properties. + + :param name: Name of the internal cached attribute. + :param value: Value to assign to the attribute. + """ + self._input_stream = None + setattr(self, name, value) + + @property + def form(self) -> MultiDict: + """A :class:`MultiDict` of form values.""" + return self._get_form("_form", MultiDict) + + @form.setter + def form(self, value: MultiDict) -> None: + self._set_form("_form", value) + + @property + def files(self) -> FileMultiDict: + """A :class:`FileMultiDict` of uploaded files. Use + :meth:`~FileMultiDict.add_file` to add new files. + """ + return self._get_form("_files", FileMultiDict) + + @files.setter + def files(self, value: FileMultiDict) -> None: + self._set_form("_files", value) + + @property + def input_stream(self) -> t.Optional[t.IO[bytes]]: + """An optional input stream. This is mutually exclusive with + setting :attr:`form` and :attr:`files`, setting it will clear + those. Do not provide this if the method is not ``POST`` or + another method that has a body. + """ + return self._input_stream + + @input_stream.setter + def input_stream(self, value: t.Optional[t.IO[bytes]]) -> None: + self._input_stream = value + self._form = None + self._files = None + + @property + def query_string(self) -> str: + """The query string. If you set this to a string + :attr:`args` will no longer be available. 
+ """ + if self._query_string is None: + if self._args is not None: + return url_encode(self._args, charset=self.charset) + return "" + return self._query_string + + @query_string.setter + def query_string(self, value: t.Optional[str]) -> None: + self._query_string = value + self._args = None + + @property + def args(self) -> MultiDict: + """The URL arguments as :class:`MultiDict`.""" + if self._query_string is not None: + raise AttributeError("a query string is defined") + if self._args is None: + self._args = MultiDict() + return self._args + + @args.setter + def args(self, value: t.Optional[MultiDict]) -> None: + self._query_string = None + self._args = value + + @property + def server_name(self) -> str: + """The server name (read-only, use :attr:`host` to set)""" + return self.host.split(":", 1)[0] + + @property + def server_port(self) -> int: + """The server port as integer (read-only, use :attr:`host` to set)""" + pieces = self.host.split(":", 1) + + if len(pieces) == 2: + try: + return int(pieces[1]) + except ValueError: + pass + + if self.url_scheme == "https": + return 443 + return 80 + + def __del__(self) -> None: + try: + self.close() + except Exception: + pass + + def close(self) -> None: + """Closes all files. If you put real :class:`file` objects into the + :attr:`files` dict you can call this method to automatically close + them all in one go. + """ + if self.closed: + return + try: + files = self.files.values() + except AttributeError: + files = () # type: ignore + for f in files: + try: + f.close() + except Exception: + pass + self.closed = True + + def get_environ(self) -> "WSGIEnvironment": + """Return the built environ. + + .. versionchanged:: 0.15 + The content type and length headers are set based on + input stream detection. Previously this only set the WSGI + keys. 
+ """ + input_stream = self.input_stream + content_length = self.content_length + + mimetype = self.mimetype + content_type = self.content_type + + if input_stream is not None: + start_pos = input_stream.tell() + input_stream.seek(0, 2) + end_pos = input_stream.tell() + input_stream.seek(start_pos) + content_length = end_pos - start_pos + elif mimetype == "multipart/form-data": + input_stream, content_length, boundary = stream_encode_multipart( + CombinedMultiDict([self.form, self.files]), charset=self.charset + ) + content_type = f'{mimetype}; boundary="{boundary}"' + elif mimetype == "application/x-www-form-urlencoded": + form_encoded = url_encode(self.form, charset=self.charset).encode("ascii") + content_length = len(form_encoded) + input_stream = BytesIO(form_encoded) + else: + input_stream = BytesIO() + + result: "WSGIEnvironment" = {} + if self.environ_base: + result.update(self.environ_base) + + def _path_encode(x: str) -> str: + return _wsgi_encoding_dance(url_unquote(x, self.charset), self.charset) + + raw_uri = _wsgi_encoding_dance(self.request_uri, self.charset) + result.update( + { + "REQUEST_METHOD": self.method, + "SCRIPT_NAME": _path_encode(self.script_root), + "PATH_INFO": _path_encode(self.path), + "QUERY_STRING": _wsgi_encoding_dance(self.query_string, self.charset), + # Non-standard, added by mod_wsgi, uWSGI + "REQUEST_URI": raw_uri, + # Non-standard, added by gunicorn + "RAW_URI": raw_uri, + "SERVER_NAME": self.server_name, + "SERVER_PORT": str(self.server_port), + "HTTP_HOST": self.host, + "SERVER_PROTOCOL": self.server_protocol, + "wsgi.version": self.wsgi_version, + "wsgi.url_scheme": self.url_scheme, + "wsgi.input": input_stream, + "wsgi.errors": self.errors_stream, + "wsgi.multithread": self.multithread, + "wsgi.multiprocess": self.multiprocess, + "wsgi.run_once": self.run_once, + } + ) + + headers = self.headers.copy() + # Don't send these as headers, they're part of the environ. + headers.remove("Content-Type") + headers.remove("Content-Length") + + if content_type is not None: + result["CONTENT_TYPE"] = content_type + + if content_length is not None: + result["CONTENT_LENGTH"] = str(content_length) + + combined_headers = defaultdict(list) + + for key, value in headers.to_wsgi_list(): + combined_headers[f"HTTP_{key.upper().replace('-', '_')}"].append(value) + + for key, values in combined_headers.items(): + result[key] = ", ".join(values) + + if self.environ_overrides: + result.update(self.environ_overrides) + + return result + + def get_request(self, cls: t.Optional[t.Type[Request]] = None) -> Request: + """Returns a request with the data. If the request class is not + specified :attr:`request_class` is used. + + :param cls: The request wrapper to use. + """ + if cls is None: + cls = self.request_class + + return cls(self.get_environ()) + + +class ClientRedirectError(Exception): + """If a redirect loop is detected when using follow_redirects=True with + the :cls:`Client`, then this exception is raised. + """ + + +class Client: + """This class allows you to send requests to a wrapped application. + + The use_cookies parameter indicates whether cookies should be stored and + sent for subsequent requests. This is True by default, but passing False + will disable this behaviour. + + If you want to request some subdomain of your application you may set + `allow_subdomain_redirects` to `True` as if not no external redirects + are allowed. + + .. versionchanged:: 2.1 + Removed deprecated behavior of treating the response as a + tuple. 
All data is available as properties on the returned + response object. + + .. versionchanged:: 2.0 + ``response_wrapper`` is always a subclass of + :class:``TestResponse``. + + .. versionchanged:: 0.5 + Added the ``use_cookies`` parameter. + """ + + def __init__( + self, + application: "WSGIApplication", + response_wrapper: t.Optional[t.Type["Response"]] = None, + use_cookies: bool = True, + allow_subdomain_redirects: bool = False, + ) -> None: + self.application = application + + if response_wrapper in {None, Response}: + response_wrapper = TestResponse + elif not isinstance(response_wrapper, TestResponse): + response_wrapper = type( + "WrapperTestResponse", + (TestResponse, response_wrapper), # type: ignore + {}, + ) + + self.response_wrapper = t.cast(t.Type["TestResponse"], response_wrapper) + + if use_cookies: + self.cookie_jar: t.Optional[_TestCookieJar] = _TestCookieJar() + else: + self.cookie_jar = None + + self.allow_subdomain_redirects = allow_subdomain_redirects + + def set_cookie( + self, + server_name: str, + key: str, + value: str = "", + max_age: t.Optional[t.Union[timedelta, int]] = None, + expires: t.Optional[t.Union[str, datetime, int, float]] = None, + path: str = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + charset: str = "utf-8", + ) -> None: + """Sets a cookie in the client's cookie jar. The server name + is required and has to match the one that is also passed to + the open call. + """ + assert self.cookie_jar is not None, "cookies disabled" + header = dump_cookie( + key, + value, + max_age, + expires, + path, + domain, + secure, + httponly, + charset, + samesite=samesite, + ) + environ = create_environ(path, base_url=f"http://{server_name}") + headers = [("Set-Cookie", header)] + self.cookie_jar.extract_wsgi(environ, headers) + + def delete_cookie( + self, + server_name: str, + key: str, + path: str = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + ) -> None: + """Deletes a cookie in the test client.""" + self.set_cookie( + server_name, + key, + expires=0, + max_age=0, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + def run_wsgi_app( + self, environ: "WSGIEnvironment", buffered: bool = False + ) -> t.Tuple[t.Iterable[bytes], str, Headers]: + """Runs the wrapped WSGI app with the given environment. + + :meta private: + """ + if self.cookie_jar is not None: + self.cookie_jar.inject_wsgi(environ) + + rv = run_wsgi_app(self.application, environ, buffered=buffered) + + if self.cookie_jar is not None: + self.cookie_jar.extract_wsgi(environ, rv[2]) + + return rv + + def resolve_redirect( + self, response: "TestResponse", buffered: bool = False + ) -> "TestResponse": + """Perform a new request to the location given by the redirect + response to the previous request. + + :meta private: + """ + scheme, netloc, path, qs, anchor = url_parse(response.location) + builder = EnvironBuilder.from_environ( + response.request.environ, path=path, query_string=qs + ) + + to_name_parts = netloc.split(":", 1)[0].split(".") + from_name_parts = builder.server_name.split(".") + + if to_name_parts != [""]: + # The new location has a host, use it for the base URL. + builder.url_scheme = scheme + builder.host = netloc + else: + # A local redirect with autocorrect_location_header=False + # doesn't have a host, so use the request's host. 
+ to_name_parts = from_name_parts + + # Explain why a redirect to a different server name won't be followed. + if to_name_parts != from_name_parts: + if to_name_parts[-len(from_name_parts) :] == from_name_parts: + if not self.allow_subdomain_redirects: + raise RuntimeError("Following subdomain redirects is not enabled.") + else: + raise RuntimeError("Following external redirects is not supported.") + + path_parts = path.split("/") + root_parts = builder.script_root.split("/") + + if path_parts[: len(root_parts)] == root_parts: + # Strip the script root from the path. + builder.path = path[len(builder.script_root) :] + else: + # The new location is not under the script root, so use the + # whole path and clear the previous root. + builder.path = path + builder.script_root = "" + + # Only 307 and 308 preserve all of the original request. + if response.status_code not in {307, 308}: + # HEAD is preserved, everything else becomes GET. + if builder.method != "HEAD": + builder.method = "GET" + + # Clear the body and the headers that describe it. + + if builder.input_stream is not None: + builder.input_stream.close() + builder.input_stream = None + + builder.content_type = None + builder.content_length = None + builder.headers.pop("Transfer-Encoding", None) + + return self.open(builder, buffered=buffered) + + def open( + self, + *args: t.Any, + buffered: bool = False, + follow_redirects: bool = False, + **kwargs: t.Any, + ) -> "TestResponse": + """Generate an environ dict from the given arguments, make a + request to the application using it, and return the response. + + :param args: Passed to :class:`EnvironBuilder` to create the + environ for the request. If a single arg is passed, it can + be an existing :class:`EnvironBuilder` or an environ dict. + :param buffered: Convert the iterator returned by the app into + a list. If the iterator has a ``close()`` method, it is + called automatically. + :param follow_redirects: Make additional requests to follow HTTP + redirects until a non-redirect status is returned. + :attr:`TestResponse.history` lists the intermediate + responses. + + .. versionchanged:: 2.1 + Removed the ``as_tuple`` parameter. + + .. versionchanged:: 2.0 + ``as_tuple`` is deprecated and will be removed in Werkzeug + 2.1. Use :attr:`TestResponse.request` and + ``request.environ`` instead. + + .. versionchanged:: 2.0 + The request input stream is closed when calling + ``response.close()``. Input streams for redirects are + automatically closed. + + .. versionchanged:: 0.5 + If a dict is provided as file in the dict for the ``data`` + parameter the content type has to be called ``content_type`` + instead of ``mimetype``. This change was made for + consistency with :class:`werkzeug.FileWrapper`. + + .. versionchanged:: 0.5 + Added the ``follow_redirects`` parameter. 
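+
+        A minimal usage sketch (``app`` stands in for any WSGI
+        application; the URL and form data are illustrative only)::
+
+            client = Client(app)
+            response = client.open(
+                "/submit",
+                method="POST",
+                data={"name": "value"},
+                follow_redirects=True,
+            )
+            response.status_code
+            response.text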
+ """ + request: t.Optional["Request"] = None + + if not kwargs and len(args) == 1: + arg = args[0] + + if isinstance(arg, EnvironBuilder): + request = arg.get_request() + elif isinstance(arg, dict): + request = EnvironBuilder.from_environ(arg).get_request() + elif isinstance(arg, Request): + request = arg + + if request is None: + builder = EnvironBuilder(*args, **kwargs) + + try: + request = builder.get_request() + finally: + builder.close() + + response = self.run_wsgi_app(request.environ, buffered=buffered) + response = self.response_wrapper(*response, request=request) + + redirects = set() + history: t.List["TestResponse"] = [] + + if not follow_redirects: + return response + + while response.status_code in { + 301, + 302, + 303, + 305, + 307, + 308, + }: + # Exhaust intermediate response bodies to ensure middleware + # that returns an iterator runs any cleanup code. + if not buffered: + response.make_sequence() + response.close() + + new_redirect_entry = (response.location, response.status_code) + + if new_redirect_entry in redirects: + raise ClientRedirectError( + f"Loop detected: A {response.status_code} redirect" + f" to {response.location} was already made." + ) + + redirects.add(new_redirect_entry) + response.history = tuple(history) + history.append(response) + response = self.resolve_redirect(response, buffered=buffered) + else: + # This is the final request after redirects. + response.history = tuple(history) + # Close the input stream when closing the response, in case + # the input is an open temporary file. + response.call_on_close(request.input_stream.close) + return response + + def get(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``GET``.""" + kw["method"] = "GET" + return self.open(*args, **kw) + + def post(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``POST``.""" + kw["method"] = "POST" + return self.open(*args, **kw) + + def put(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``PUT``.""" + kw["method"] = "PUT" + return self.open(*args, **kw) + + def delete(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``DELETE``.""" + kw["method"] = "DELETE" + return self.open(*args, **kw) + + def patch(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``PATCH``.""" + kw["method"] = "PATCH" + return self.open(*args, **kw) + + def options(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``OPTIONS``.""" + kw["method"] = "OPTIONS" + return self.open(*args, **kw) + + def head(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``HEAD``.""" + kw["method"] = "HEAD" + return self.open(*args, **kw) + + def trace(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``TRACE``.""" + kw["method"] = "TRACE" + return self.open(*args, **kw) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.application!r}>" + + +def create_environ(*args: t.Any, **kwargs: t.Any) -> "WSGIEnvironment": + """Create a new WSGI environ dict based on the values passed. The first + parameter should be the path of the request which defaults to '/'. 
The + second one can either be an absolute path (in that case the host is + localhost:80) or a full path to the request with scheme, netloc port and + the path to the script. + + This accepts the same arguments as the :class:`EnvironBuilder` + constructor. + + .. versionchanged:: 0.5 + This function is now a thin wrapper over :class:`EnvironBuilder` which + was added in 0.5. The `headers`, `environ_base`, `environ_overrides` + and `charset` parameters were added. + """ + builder = EnvironBuilder(*args, **kwargs) + + try: + return builder.get_environ() + finally: + builder.close() + + +def run_wsgi_app( + app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False +) -> t.Tuple[t.Iterable[bytes], str, Headers]: + """Return a tuple in the form (app_iter, status, headers) of the + application output. This works best if you pass it an application that + returns an iterator all the time. + + Sometimes applications may use the `write()` callable returned + by the `start_response` function. This tries to resolve such edge + cases automatically. But if you don't get the expected output you + should set `buffered` to `True` which enforces buffering. + + If passed an invalid WSGI application the behavior of this function is + undefined. Never pass non-conforming WSGI applications to this function. + + :param app: the application to execute. + :param buffered: set to `True` to enforce buffering. + :return: tuple in the form ``(app_iter, status, headers)`` + """ + # Copy environ to ensure any mutations by the app (ProxyFix, for + # example) don't affect subsequent requests (such as redirects). + environ = _get_environ(environ).copy() + status: str + response: t.Optional[t.Tuple[str, t.List[t.Tuple[str, str]]]] = None + buffer: t.List[bytes] = [] + + def start_response(status, headers, exc_info=None): # type: ignore + nonlocal response + + if exc_info: + try: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + + response = (status, headers) + return buffer.append + + app_rv = app(environ, start_response) + close_func = getattr(app_rv, "close", None) + app_iter: t.Iterable[bytes] = iter(app_rv) + + # when buffering we emit the close call early and convert the + # application iterator into a regular list + if buffered: + try: + app_iter = list(app_iter) + finally: + if close_func is not None: + close_func() + + # otherwise we iterate the application iter until we have a response, chain + # the already received data with the already collected data and wrap it in + # a new `ClosingIterator` if we need to restore a `close` callable from the + # original return value. + else: + for item in app_iter: + buffer.append(item) + + if response is not None: + break + + if buffer: + app_iter = chain(buffer, app_iter) + + if close_func is not None and app_iter is not app_rv: + app_iter = ClosingIterator(app_iter, close_func) + + status, headers = response # type: ignore + return app_iter, status, Headers(headers) + + +class TestResponse(Response): + """:class:`~werkzeug.wrappers.Response` subclass that provides extra + information about requests made with the test :class:`Client`. + + Test client requests will always return an instance of this class. + If a custom response class is passed to the client, it is + subclassed along with this to support test information. + + If the test request included large files, or if the application is + serving a file, call :meth:`close` to close any open files and + prevent Python showing a ``ResourceWarning``. + + .. 
versionchanged:: 2.2 + Set the ``default_mimetype`` to None to prevent a mimetype being + assumed if missing. + + .. versionchanged:: 2.1 + Removed deprecated behavior for treating the response instance + as a tuple. + + .. versionadded:: 2.0 + Test client methods always return instances of this class. + """ + + default_mimetype = None + # Don't assume a mimetype, instead use whatever the response provides + + request: Request + """A request object with the environ used to make the request that + resulted in this response. + """ + + history: t.Tuple["TestResponse", ...] + """A list of intermediate responses. Populated when the test request + is made with ``follow_redirects`` enabled. + """ + + # Tell Pytest to ignore this, it's not a test class. + __test__ = False + + def __init__( + self, + response: t.Iterable[bytes], + status: str, + headers: Headers, + request: Request, + history: t.Tuple["TestResponse"] = (), # type: ignore + **kwargs: t.Any, + ) -> None: + super().__init__(response, status, headers, **kwargs) + self.request = request + self.history = history + self._compat_tuple = response, status, headers + + @cached_property + def text(self) -> str: + """The response data as text. A shortcut for + ``response.get_data(as_text=True)``. + + .. versionadded:: 2.1 + """ + return self.get_data(as_text=True) diff --git a/venv/lib/python3.8/site-packages/werkzeug/testapp.py b/venv/lib/python3.8/site-packages/werkzeug/testapp.py new file mode 100644 index 0000000..0d7ffbb --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/testapp.py @@ -0,0 +1,241 @@ +"""A small application that can be used to test a WSGI server and check +it for WSGI compliance. +""" +import base64 +import os +import sys +import typing as t +from textwrap import wrap + +from markupsafe import escape + +from . 
import __version__ as _werkzeug_version +from .wrappers.request import Request +from .wrappers.response import Response + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIEnvironment + + +logo = Response( + base64.b64decode( + """ +R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP///////// +//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv +nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25 +7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq +ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX +m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G +p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo +SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf +78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA +ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA +tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx +w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx +lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45 +Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB +yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd +dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r +idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh +EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8 +ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64 +gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C +JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y +Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9 +YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX +c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb +qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL +cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG +cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2 +KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe +EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb +UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB +Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z +aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn +kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs +=""" + ), + mimetype="image/png", +) + + +TEMPLATE = """\ + + +WSGI Information + +
+<div class="box">
+  <h1>WSGI Information</h1>
+  <p>
+    This page displays all available information about the WSGI server and
+    the underlying Python interpreter.
+  <h2>Python Interpreter</h2>
+  <table>
+    <tr><th>Python Version<td>%(python_version)s
+    <tr><th>Platform<td>%(platform)s [%(os)s]
+    <tr><th>API Version<td>%(api_version)s
+    <tr><th>Byteorder<td>%(byteorder)s
+    <tr><th>Werkzeug Version<td>%(werkzeug_version)s
+  </table>
+  <h2>WSGI Environment</h2>
+  <table>%(wsgi_env)s</table>
+  <h2>Installed Eggs</h2>
+  <p>
+    The following python packages were installed on the system as
+    Python eggs:
+  <ul>%(python_eggs)s</ul>
+  <h2>System Path</h2>
+  <p>
+    The following paths are the current contents of the load path. The
+    following entries are looked up for Python packages. Note that not
+    all items in this path are folders. Gray and underlined items are
+    entries pointing to invalid resources or used by custom import hooks
+    such as the zip importer.
+  <p>
+    Items with a bright background were expanded for display from a relative
+    path. If you encounter such paths in the output you might want to check
+    your setup as relative paths are usually problematic in multithreaded
+    environments.
+  <ul class="path">%(sys_path)s</ul>
+</div>
    +""" + + +def iter_sys_path() -> t.Iterator[t.Tuple[str, bool, bool]]: + if os.name == "posix": + + def strip(x: str) -> str: + prefix = os.path.expanduser("~") + if x.startswith(prefix): + x = f"~{x[len(prefix) :]}" + return x + + else: + + def strip(x: str) -> str: + return x + + cwd = os.path.abspath(os.getcwd()) + for item in sys.path: + path = os.path.join(cwd, item or os.path.curdir) + yield strip(os.path.normpath(path)), not os.path.isdir(path), path != item + + +def render_testapp(req: Request) -> bytes: + try: + import pkg_resources + except ImportError: + eggs: t.Iterable[t.Any] = () + else: + eggs = sorted( + pkg_resources.working_set, + key=lambda x: x.project_name.lower(), # type: ignore + ) + python_eggs = [] + for egg in eggs: + try: + version = egg.version + except (ValueError, AttributeError): + version = "unknown" + python_eggs.append( + f"
  • {escape(egg.project_name)} [{escape(version)}]" + ) + + wsgi_env = [] + sorted_environ = sorted(req.environ.items(), key=lambda x: repr(x[0]).lower()) + for key, value in sorted_environ: + value = "".join(wrap(str(escape(repr(value))))) + wsgi_env.append(f"{escape(key)}{value}") + + sys_path = [] + for item, virtual, expanded in iter_sys_path(): + class_ = [] + if virtual: + class_.append("virtual") + if expanded: + class_.append("exp") + class_ = f' class="{" ".join(class_)}"' if class_ else "" + sys_path.append(f"{escape(item)}") + + return ( + TEMPLATE + % { + "python_version": "
    ".join(escape(sys.version).splitlines()), + "platform": escape(sys.platform), + "os": escape(os.name), + "api_version": sys.api_version, + "byteorder": sys.byteorder, + "werkzeug_version": _werkzeug_version, + "python_eggs": "\n".join(python_eggs), + "wsgi_env": "\n".join(wsgi_env), + "sys_path": "\n".join(sys_path), + } + ).encode("utf-8") + + +def test_app( + environ: "WSGIEnvironment", start_response: "StartResponse" +) -> t.Iterable[bytes]: + """Simple test application that dumps the environment. You can use + it to check if Werkzeug is working properly: + + .. sourcecode:: pycon + + >>> from werkzeug.serving import run_simple + >>> from werkzeug.testapp import test_app + >>> run_simple('localhost', 3000, test_app) + * Running on http://localhost:3000/ + + The application displays important information from the WSGI environment, + the Python interpreter and the installed libraries. + """ + req = Request(environ, populate_request=False) + if req.args.get("resource") == "logo": + response = logo + else: + response = Response(render_testapp(req), mimetype="text/html") + return response(environ, start_response) + + +if __name__ == "__main__": + from .serving import run_simple + + run_simple("localhost", 5000, test_app, use_reloader=True) diff --git a/venv/lib/python3.8/site-packages/werkzeug/urls.py b/venv/lib/python3.8/site-packages/werkzeug/urls.py new file mode 120000 index 0000000..3c23477 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/urls.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/43/d4/a2/f9e561ef2c64debc24cebc06466d78e855d55e083d9413f79341d47d53 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/user_agent.py b/venv/lib/python3.8/site-packages/werkzeug/user_agent.py new file mode 120000 index 0000000..0a0549f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/user_agent.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/59/c9/59/869be02eeacc178e61b22a126d2ef91f565be2231018a377ff2676a0aa \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/utils.py b/venv/lib/python3.8/site-packages/werkzeug/utils.py new file mode 100644 index 0000000..672e6e5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/utils.py @@ -0,0 +1,705 @@ +import io +import mimetypes +import os +import pkgutil +import re +import sys +import typing as t +import unicodedata +from datetime import datetime +from time import time +from zlib import adler32 + +from markupsafe import escape + +from ._internal import _DictAccessorProperty +from ._internal import _missing +from ._internal import _TAccessorValue +from .datastructures import Headers +from .exceptions import NotFound +from .exceptions import RequestedRangeNotSatisfiable +from .security import safe_join +from .urls import url_quote +from .wsgi import wrap_file + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request + from .wrappers.response import Response + +_T = t.TypeVar("_T") + +_entity_re = re.compile(r"&([^;]+);") +_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") +_windows_device_files = ( + "CON", + "AUX", + "COM1", + "COM2", + "COM3", + "COM4", + "LPT1", + "LPT2", + "LPT3", + "PRN", + "NUL", +) + + +class cached_property(property, t.Generic[_T]): + """A :func:`property` that is only evaluated once. Subsequent access + returns the cached value. Setting the property sets the cached + value. Deleting the property clears the cached value, accessing it + again will evaluate it again. + + .. 
code-block:: python + + class Example: + @cached_property + def value(self): + # calculate something important here + return 42 + + e = Example() + e.value # evaluates + e.value # uses cache + e.value = 16 # sets cache + del e.value # clears cache + + If the class defines ``__slots__``, it must add ``_cache_{name}`` as + a slot. Alternatively, it can add ``__dict__``, but that's usually + not desirable. + + .. versionchanged:: 2.1 + Works with ``__slots__``. + + .. versionchanged:: 2.0 + ``del obj.name`` clears the cached value. + """ + + def __init__( + self, + fget: t.Callable[[t.Any], _T], + name: t.Optional[str] = None, + doc: t.Optional[str] = None, + ) -> None: + super().__init__(fget, doc=doc) + self.__name__ = name or fget.__name__ + self.slot_name = f"_cache_{self.__name__}" + self.__module__ = fget.__module__ + + def __set__(self, obj: object, value: _T) -> None: + if hasattr(obj, "__dict__"): + obj.__dict__[self.__name__] = value + else: + setattr(obj, self.slot_name, value) + + def __get__(self, obj: object, type: type = None) -> _T: # type: ignore + if obj is None: + return self # type: ignore + + obj_dict = getattr(obj, "__dict__", None) + + if obj_dict is not None: + value: _T = obj_dict.get(self.__name__, _missing) + else: + value = getattr(obj, self.slot_name, _missing) # type: ignore[arg-type] + + if value is _missing: + value = self.fget(obj) # type: ignore + + if obj_dict is not None: + obj.__dict__[self.__name__] = value + else: + setattr(obj, self.slot_name, value) + + return value + + def __delete__(self, obj: object) -> None: + if hasattr(obj, "__dict__"): + del obj.__dict__[self.__name__] + else: + setattr(obj, self.slot_name, _missing) + + +class environ_property(_DictAccessorProperty[_TAccessorValue]): + """Maps request attributes to environment variables. This works not only + for the Werkzeug request object, but also any other class with an + environ attribute: + + >>> class Test(object): + ... environ = {'key': 'value'} + ... test = environ_property('key') + >>> var = Test() + >>> var.test + 'value' + + If you pass it a second value it's used as default if the key does not + exist, the third one can be a converter that takes a value and converts + it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value + is used. If no default value is provided `None` is used. + + Per default the property is read only. You have to explicitly enable it + by passing ``read_only=False`` to the constructor. + """ + + read_only = True + + def lookup(self, obj: "Request") -> "WSGIEnvironment": + return obj.environ + + +class header_property(_DictAccessorProperty[_TAccessorValue]): + """Like `environ_property` but for headers.""" + + def lookup(self, obj: t.Union["Request", "Response"]) -> Headers: + return obj.headers + + +# https://cgit.freedesktop.org/xdg/shared-mime-info/tree/freedesktop.org.xml.in +# https://www.iana.org/assignments/media-types/media-types.xhtml +# Types listed in the XDG mime info that have a charset in the IANA registration. +_charset_mimetypes = { + "application/ecmascript", + "application/javascript", + "application/sql", + "application/xml", + "application/xml-dtd", + "application/xml-external-parsed-entity", +} + + +def get_content_type(mimetype: str, charset: str) -> str: + """Returns the full content type string with charset for a mimetype. + + If the mimetype represents text, the charset parameter will be + appended, otherwise the mimetype is returned unchanged. + + :param mimetype: The mimetype to be used as content type. 
+ :param charset: The charset to be appended for text mimetypes. + :return: The content type. + + .. versionchanged:: 0.15 + Any type that ends with ``+xml`` gets a charset, not just those + that start with ``application/``. Known text types such as + ``application/javascript`` are also given charsets. + """ + if ( + mimetype.startswith("text/") + or mimetype in _charset_mimetypes + or mimetype.endswith("+xml") + ): + mimetype += f"; charset={charset}" + + return mimetype + + +def secure_filename(filename: str) -> str: + r"""Pass it a filename and it will return a secure version of it. This + filename can then safely be stored on a regular file system and passed + to :func:`os.path.join`. The filename returned is an ASCII only string + for maximum portability. + + On windows systems the function also makes sure that the file is not + named after one of the special device files. + + >>> secure_filename("My cool movie.mov") + 'My_cool_movie.mov' + >>> secure_filename("../../../etc/passwd") + 'etc_passwd' + >>> secure_filename('i contain cool \xfcml\xe4uts.txt') + 'i_contain_cool_umlauts.txt' + + The function might return an empty filename. It's your responsibility + to ensure that the filename is unique and that you abort or + generate a random filename if the function returned an empty one. + + .. versionadded:: 0.5 + + :param filename: the filename to secure + """ + filename = unicodedata.normalize("NFKD", filename) + filename = filename.encode("ascii", "ignore").decode("ascii") + + for sep in os.path.sep, os.path.altsep: + if sep: + filename = filename.replace(sep, " ") + filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip( + "._" + ) + + # on nt a couple of special files are present in each folder. We + # have to ensure that the target file is not such a filename. In + # this case we prepend an underline + if ( + os.name == "nt" + and filename + and filename.split(".")[0].upper() in _windows_device_files + ): + filename = f"_{filename}" + + return filename + + +def redirect( + location: str, code: int = 302, Response: t.Optional[t.Type["Response"]] = None +) -> "Response": + """Returns a response object (a WSGI application) that, if called, + redirects the client to the target location. Supported codes are + 301, 302, 303, 305, 307, and 308. 300 is not supported because + it's not a real redirect and 304 because it's the answer for a + request with a request with defined If-Modified-Since headers. + + .. versionadded:: 0.6 + The location can now be a unicode string that is encoded using + the :func:`iri_to_uri` function. + + .. versionadded:: 0.10 + The class used for the Response object can now be passed in. + + :param location: the location the response should redirect to. + :param code: the redirect status code. defaults to 302. + :param class Response: a Response class to use when instantiating a + response. The default is :class:`werkzeug.wrappers.Response` if + unspecified. + """ + if Response is None: + from .wrappers import Response # type: ignore + + display_location = escape(location) + if isinstance(location, str): + # Safe conversion is necessary here as we might redirect + # to a broken URI scheme (for instance itms-services). + from .urls import iri_to_uri + + location = iri_to_uri(location, safe_conversion=True) + + response = Response( # type: ignore + "\n" + "\n" + "Redirecting...\n" + "

    Redirecting...

    \n" + "

    You should be redirected automatically to the target URL: " + f'{display_location}. If' + " not, click the link.\n", + code, + mimetype="text/html", + ) + response.headers["Location"] = location + return response + + +def append_slash_redirect(environ: "WSGIEnvironment", code: int = 308) -> "Response": + """Redirect to the current URL with a slash appended. + + If the current URL is ``/user/42``, the redirect URL will be + ``42/``. When joined to the current URL during response + processing or by the browser, this will produce ``/user/42/``. + + The behavior is undefined if the path ends with a slash already. If + called unconditionally on a URL, it may produce a redirect loop. + + :param environ: Use the path and query from this WSGI environment + to produce the redirect URL. + :param code: the status code for the redirect. + + .. versionchanged:: 2.1 + Produce a relative URL that only modifies the last segment. + Relevant when the current path has multiple segments. + + .. versionchanged:: 2.1 + The default status code is 308 instead of 301. This preserves + the request method and body. + """ + tail = environ["PATH_INFO"].rpartition("/")[2] + + if not tail: + new_path = "./" + else: + new_path = f"{tail}/" + + query_string = environ.get("QUERY_STRING") + + if query_string: + new_path = f"{new_path}?{query_string}" + + return redirect(new_path, code) + + +def send_file( + path_or_file: t.Union[os.PathLike, str, t.IO[bytes]], + environ: "WSGIEnvironment", + mimetype: t.Optional[str] = None, + as_attachment: bool = False, + download_name: t.Optional[str] = None, + conditional: bool = True, + etag: t.Union[bool, str] = True, + last_modified: t.Optional[t.Union[datetime, int, float]] = None, + max_age: t.Optional[ + t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] + ] = None, + use_x_sendfile: bool = False, + response_class: t.Optional[t.Type["Response"]] = None, + _root_path: t.Optional[t.Union[os.PathLike, str]] = None, +) -> "Response": + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. Alternatively, + if the HTTP server supports ``X-Sendfile``, ``use_x_sendfile=True`` + will tell the server to send the given path, which is much more + efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. Make + sure the file pointer is seeked to the start of the data. + :param environ: The WSGI environ for the current request. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. 
+ :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. + :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. + :param use_x_sendfile: Set the ``X-Sendfile`` header to let the + server to efficiently send the file. Requires support from the + HTTP server. Requires passing a file path. + :param response_class: Build the response using this class. Defaults + to :class:`~werkzeug.wrappers.Response`. + :param _root_path: Do not use. For internal use only. Use + :func:`send_from_directory` to safely send files under a path. + + .. versionchanged:: 2.0.2 + ``send_file`` only sets a detected ``Content-Encoding`` if + ``as_attachment`` is disabled. + + .. versionadded:: 2.0 + Adapted from Flask's implementation. + + .. versionchanged:: 2.0 + ``download_name`` replaces Flask's ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces Flask's ``cache_timeout`` parameter. + ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces Flask's ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + If an encoding is returned when guessing ``mimetype`` from + ``download_name``, set the ``Content-Encoding`` header. + """ + if response_class is None: + from .wrappers import Response + + response_class = Response + + path: t.Optional[str] = None + file: t.Optional[t.IO[bytes]] = None + size: t.Optional[int] = None + mtime: t.Optional[float] = None + headers = Headers() + + if isinstance(path_or_file, (os.PathLike, str)) or hasattr( + path_or_file, "__fspath__" + ): + path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file) + + # Flask will pass app.root_path, allowing its send_file wrapper + # to not have to deal with paths. + if _root_path is not None: + path = os.path.join(_root_path, path_or_file) + else: + path = os.path.abspath(path_or_file) + + stat = os.stat(path) + size = stat.st_size + mtime = stat.st_mtime + else: + file = path_or_file + + if download_name is None and path is not None: + download_name = os.path.basename(path) + + if mimetype is None: + if download_name is None: + raise TypeError( + "Unable to detect the MIME type because a file name is" + " not available. Either set 'download_name', pass a" + " path instead of a file, or set 'mimetype'." + ) + + mimetype, encoding = mimetypes.guess_type(download_name) + + if mimetype is None: + mimetype = "application/octet-stream" + + # Don't send encoding for attachments, it causes browsers to + # save decompress tar.gz files. 
+ if encoding is not None and not as_attachment: + headers.set("Content-Encoding", encoding) + + if download_name is not None: + try: + download_name.encode("ascii") + except UnicodeEncodeError: + simple = unicodedata.normalize("NFKD", download_name) + simple = simple.encode("ascii", "ignore").decode("ascii") + quoted = url_quote(download_name, safe="") + names = {"filename": simple, "filename*": f"UTF-8''{quoted}"} + else: + names = {"filename": download_name} + + value = "attachment" if as_attachment else "inline" + headers.set("Content-Disposition", value, **names) + elif as_attachment: + raise TypeError( + "No name provided for attachment. Either set" + " 'download_name' or pass a path instead of a file." + ) + + if use_x_sendfile and path is not None: + headers["X-Sendfile"] = path + data = None + else: + if file is None: + file = open(path, "rb") # type: ignore + elif isinstance(file, io.BytesIO): + size = file.getbuffer().nbytes + elif isinstance(file, io.TextIOBase): + raise ValueError("Files must be opened in binary mode or use BytesIO.") + + data = wrap_file(environ, file) + + rv = response_class( + data, mimetype=mimetype, headers=headers, direct_passthrough=True + ) + + if size is not None: + rv.content_length = size + + if last_modified is not None: + rv.last_modified = last_modified # type: ignore + elif mtime is not None: + rv.last_modified = mtime # type: ignore + + rv.cache_control.no_cache = True + + # Flask will pass app.get_send_file_max_age, allowing its send_file + # wrapper to not have to deal with paths. + if callable(max_age): + max_age = max_age(path) + + if max_age is not None: + if max_age > 0: + rv.cache_control.no_cache = None + rv.cache_control.public = True + + rv.cache_control.max_age = max_age + rv.expires = int(time() + max_age) # type: ignore + + if isinstance(etag, str): + rv.set_etag(etag) + elif etag and path is not None: + check = adler32(path.encode("utf-8")) & 0xFFFFFFFF + rv.set_etag(f"{mtime}-{size}-{check}") + + if conditional: + try: + rv = rv.make_conditional(environ, accept_ranges=True, complete_length=size) + except RequestedRangeNotSatisfiable: + if file is not None: + file.close() + + raise + + # Some x-sendfile implementations incorrectly ignore the 304 + # status code and send the file anyway. + if rv.status_code == 304: + rv.headers.pop("x-sendfile", None) + + return rv + + +def send_from_directory( + directory: t.Union[os.PathLike, str], + path: t.Union[os.PathLike, str], + environ: "WSGIEnvironment", + **kwargs: t.Any, +) -> "Response": + """Send a file from within a directory using :func:`send_file`. + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + returns a 404 :exc:`~werkzeug.exceptions.NotFound` error. + + :param directory: The directory that ``path`` must be located under. + :param path: The path to the file to send, relative to + ``directory``. + :param environ: The WSGI environ for the current request. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionadded:: 2.0 + Adapted from Flask's implementation. + """ + path = safe_join(os.fspath(directory), os.fspath(path)) + + if path is None: + raise NotFound() + + # Flask will pass app.root_path, allowing its send_from_directory + # wrapper to not have to deal with paths. 
+ if "_root_path" in kwargs: + path = os.path.join(kwargs["_root_path"], path) + + try: + if not os.path.isfile(path): + raise NotFound() + except ValueError: + # path contains null byte on Python < 3.8 + raise NotFound() from None + + return send_file(path, environ, **kwargs) + + +def import_string(import_name: str, silent: bool = False) -> t.Any: + """Imports an object based on a string. This is useful if you want to + use import paths as endpoints or something similar. An import path can + be specified either in dotted notation (``xml.sax.saxutils.escape``) + or with a colon as object delimiter (``xml.sax.saxutils:escape``). + + If `silent` is True the return value will be `None` if the import fails. + + :param import_name: the dotted name for the object to import. + :param silent: if set to `True` import errors are ignored and + `None` is returned instead. + :return: imported object + """ + import_name = import_name.replace(":", ".") + try: + try: + __import__(import_name) + except ImportError: + if "." not in import_name: + raise + else: + return sys.modules[import_name] + + module_name, obj_name = import_name.rsplit(".", 1) + module = __import__(module_name, globals(), locals(), [obj_name]) + try: + return getattr(module, obj_name) + except AttributeError as e: + raise ImportError(e) from None + + except ImportError as e: + if not silent: + raise ImportStringError(import_name, e).with_traceback( + sys.exc_info()[2] + ) from None + + return None + + +def find_modules( + import_path: str, include_packages: bool = False, recursive: bool = False +) -> t.Iterator[str]: + """Finds all the modules below a package. This can be useful to + automatically import all views / controllers so that their metaclasses / + function decorators have a chance to register themselves on the + application. + + Packages are not returned unless `include_packages` is `True`. This can + also recursively list modules but in that case it will import all the + packages to get the correct load path of that module. + + :param import_path: the dotted name for the package to find child modules. + :param include_packages: set to `True` if packages should be returned, too. + :param recursive: set to `True` if recursion should happen. + :return: generator + """ + module = import_string(import_path) + path = getattr(module, "__path__", None) + if path is None: + raise ValueError(f"{import_path!r} is not a package") + basename = f"{module.__name__}." + for _importer, modname, ispkg in pkgutil.iter_modules(path): + modname = basename + modname + if ispkg: + if include_packages: + yield modname + if recursive: + yield from find_modules(modname, include_packages, True) + else: + yield modname + + +class ImportStringError(ImportError): + """Provides information about a failed :func:`import_string` attempt.""" + + #: String in dotted notation that failed to be imported. + import_name: str + #: Wrapped exception. + exception: BaseException + + def __init__(self, import_name: str, exception: BaseException) -> None: + self.import_name = import_name + self.exception = exception + msg = import_name + name = "" + tracked = [] + for part in import_name.replace(":", ".").split("."): + name = f"{name}.{part}" if name else part + imported = import_string(name, silent=True) + if imported: + tracked.append((name, getattr(imported, "__file__", None))) + else: + track = [f"- {n!r} found in {i!r}." 
for n, i in tracked] + track.append(f"- {name!r} not found.") + track_str = "\n".join(track) + msg = ( + f"import_string() failed for {import_name!r}. Possible reasons" + f" are:\n\n" + "- missing __init__.py in a package;\n" + "- package or module path not included in sys.path;\n" + "- duplicated package or module name taking precedence in" + " sys.path;\n" + "- missing module, class, function or variable;\n\n" + f"Debugged import:\n\n{track_str}\n\n" + f"Original exception:\n\n{type(exception).__name__}: {exception}" + ) + break + + super().__init__(msg) + + def __repr__(self) -> str: + return f"<{type(self).__name__}({self.import_name!r}, {self.exception!r})>" diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py new file mode 120000 index 0000000..f045a4f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/90/6c/8a/eeb3ae777a82c6597f8c5c96d7961aac9863d71b5d7f7a1cc7d661781f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..39ac7da Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/request.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/request.cpython-38.pyc new file mode 100644 index 0000000..873b827 Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/request.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/response.cpython-38.pyc b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/response.cpython-38.pyc new file mode 100644 index 0000000..ad26ada Binary files /dev/null and b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/response.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py new file mode 120000 index 0000000..ea56147 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/51/0e/79/f4a9064b43e8e874c182f28c964d7f02c370e73b2dce6f28fdd46b7dd4 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py new file mode 120000 index 0000000..4f0b7c5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/73/61/d4/5ebaede527fcf97101d5f517c66ea3a7b2eef34291d00fed6d016fc354 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/werkzeug/wsgi.py b/venv/lib/python3.8/site-packages/werkzeug/wsgi.py new file mode 100644 index 0000000..24ece0b --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wsgi.py @@ -0,0 +1,1020 @@ +import io +import re +import typing as t +import warnings +from functools import partial +from functools import update_wrapper +from itertools import chain + +from ._internal import _make_encode_wrapper +from ._internal import _to_bytes +from ._internal import _to_str +from .sansio import utils as _sansio_utils +from .sansio.utils import 
host_is_trusted # noqa: F401 # Imported as part of API +from .urls import _URLTuple +from .urls import uri_to_iri +from .urls import url_join +from .urls import url_parse +from .urls import url_quote + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +def responder(f: t.Callable[..., "WSGIApplication"]) -> "WSGIApplication": + """Marks a function as responder. Decorate a function with it and it + will automatically call the return value as WSGI application. + + Example:: + + @responder + def application(environ, start_response): + return Response('Hello World!') + """ + return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) + + +def get_current_url( + environ: "WSGIEnvironment", + root_only: bool = False, + strip_querystring: bool = False, + host_only: bool = False, + trusted_hosts: t.Optional[t.Iterable[str]] = None, +) -> str: + """Recreate the URL for a request from the parts in a WSGI + environment. + + The URL is an IRI, not a URI, so it may contain Unicode characters. + Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. + + :param environ: The WSGI environment to get the URL parts from. + :param root_only: Only build the root path, don't include the + remaining path or query string. + :param strip_querystring: Don't include the query string. + :param host_only: Only build the scheme and host. + :param trusted_hosts: A list of trusted host names to validate the + host against. + """ + parts = { + "scheme": environ["wsgi.url_scheme"], + "host": get_host(environ, trusted_hosts), + } + + if not host_only: + parts["root_path"] = environ.get("SCRIPT_NAME", "") + + if not root_only: + parts["path"] = environ.get("PATH_INFO", "") + + if not strip_querystring: + parts["query_string"] = environ.get("QUERY_STRING", "").encode("latin1") + + return _sansio_utils.get_current_url(**parts) + + +def _get_server( + environ: "WSGIEnvironment", +) -> t.Optional[t.Tuple[str, t.Optional[int]]]: + name = environ.get("SERVER_NAME") + + if name is None: + return None + + try: + port: t.Optional[int] = int(environ.get("SERVER_PORT", None)) + except (TypeError, ValueError): + # unix socket + port = None + + return name, port + + +def get_host( + environ: "WSGIEnvironment", trusted_hosts: t.Optional[t.Iterable[str]] = None +) -> str: + """Return the host for the given WSGI environment. + + The ``Host`` header is preferred, then ``SERVER_NAME`` if it's not + set. The returned host will only contain the port if it is different + than the standard port for the protocol. + + Optionally, verify that the host is trusted using + :func:`host_is_trusted` and raise a + :exc:`~werkzeug.exceptions.SecurityError` if it is not. + + :param environ: A WSGI environment dict. + :param trusted_hosts: A list of trusted host names. + + :return: Host, with port if necessary. + :raise ~werkzeug.exceptions.SecurityError: If the host is not + trusted. + """ + return _sansio_utils.get_host( + environ["wsgi.url_scheme"], + environ.get("HTTP_HOST"), + _get_server(environ), + trusted_hosts, + ) + + +def get_content_length(environ: "WSGIEnvironment") -> t.Optional[int]: + """Returns the content length from the WSGI environment as + integer. If it's not available or chunked transfer encoding is used, + ``None`` is returned. + + .. versionadded:: 0.9 + + :param environ: the WSGI environ to fetch the content length from. 
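+
+    A minimal sketch with hand-built environ dicts (illustrative values
+    only)::
+
+        get_content_length({"CONTENT_LENGTH": "42"})               # 42
+        get_content_length({"HTTP_TRANSFER_ENCODING": "chunked"})  # None
+        get_content_length({})                                     # None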
+ """ + return _sansio_utils.get_content_length( + http_content_length=environ.get("CONTENT_LENGTH"), + http_transfer_encoding=environ.get("HTTP_TRANSFER_ENCODING", ""), + ) + + +def get_input_stream( + environ: "WSGIEnvironment", safe_fallback: bool = True +) -> t.IO[bytes]: + """Returns the input stream from the WSGI environment and wraps it + in the most sensible way possible. The stream returned is not the + raw WSGI stream in most cases but one that is safe to read from + without taking into account the content length. + + If content length is not set, the stream will be empty for safety reasons. + If the WSGI server supports chunked or infinite streams, it should set + the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. + + .. versionadded:: 0.9 + + :param environ: the WSGI environ to fetch the stream from. + :param safe_fallback: use an empty stream as a safe fallback when the + content length is not set. Disabling this allows infinite streams, + which can be a denial-of-service risk. + """ + stream = t.cast(t.IO[bytes], environ["wsgi.input"]) + content_length = get_content_length(environ) + + # A wsgi extension that tells us if the input is terminated. In + # that case we return the stream unchanged as we know we can safely + # read it until the end. + if environ.get("wsgi.input_terminated"): + return stream + + # If the request doesn't specify a content length, returning the stream is + # potentially dangerous because it could be infinite, malicious or not. If + # safe_fallback is true, return an empty stream instead for safety. + if content_length is None: + return io.BytesIO() if safe_fallback else stream + + # Otherwise limit the stream to the content length + return t.cast(t.IO[bytes], LimitedStream(stream, content_length)) + + +def get_query_string(environ: "WSGIEnvironment") -> str: + """Returns the ``QUERY_STRING`` from the WSGI environment. This also + takes care of the WSGI decoding dance. The string returned will be + restricted to ASCII characters. + + :param environ: WSGI environment to get the query string from. + + .. deprecated:: 2.2 + Will be removed in Werkzeug 2.3. + + .. versionadded:: 0.9 + """ + warnings.warn( + "'get_query_string' is deprecated and will be removed in Werkzeug 2.3.", + DeprecationWarning, + stacklevel=2, + ) + qs = environ.get("QUERY_STRING", "").encode("latin1") + # QUERY_STRING really should be ascii safe but some browsers + # will send us some unicode stuff (I am looking at you IE). + # In that case we want to urllib quote it badly. + return url_quote(qs, safe=":&%=+$!*'(),") + + +def get_path_info( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> str: + """Return the ``PATH_INFO`` from the WSGI environment and decode it + unless ``charset`` is ``None``. + + :param environ: WSGI environment to get the path from. + :param charset: The charset for the path info, or ``None`` if no + decoding should be performed. + :param errors: The decoding error handling. + + .. versionadded:: 0.9 + """ + path = environ.get("PATH_INFO", "").encode("latin1") + return _to_str(path, charset, errors, allow_none_charset=True) # type: ignore + + +def get_script_name( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> str: + """Return the ``SCRIPT_NAME`` from the WSGI environment and decode + it unless `charset` is set to ``None``. + + :param environ: WSGI environment to get the path from. 
+ :param charset: The charset for the path, or ``None`` if no decoding + should be performed. + :param errors: The decoding error handling. + + .. deprecated:: 2.2 + Will be removed in Werkzeug 2.3. + + .. versionadded:: 0.9 + """ + warnings.warn( + "'get_script_name' is deprecated and will be removed in Werkzeug 2.3.", + DeprecationWarning, + stacklevel=2, + ) + path = environ.get("SCRIPT_NAME", "").encode("latin1") + return _to_str(path, charset, errors, allow_none_charset=True) # type: ignore + + +def pop_path_info( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> t.Optional[str]: + """Removes and returns the next segment of `PATH_INFO`, pushing it onto + `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`. + + If the `charset` is set to `None` bytes are returned. + + If there are empty segments (``'/foo//bar``) these are ignored but + properly pushed to the `SCRIPT_NAME`: + + >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} + >>> pop_path_info(env) + 'a' + >>> env['SCRIPT_NAME'] + '/foo/a' + >>> pop_path_info(env) + 'b' + >>> env['SCRIPT_NAME'] + '/foo/a/b' + + .. deprecated:: 2.2 + Will be removed in Werkzeug 2.3. + + .. versionadded:: 0.5 + + .. versionchanged:: 0.9 + The path is now decoded and a charset and encoding + parameter can be provided. + + :param environ: the WSGI environment that is modified. + :param charset: The ``encoding`` parameter passed to + :func:`bytes.decode`. + :param errors: The ``errors`` paramater passed to + :func:`bytes.decode`. + """ + warnings.warn( + "'pop_path_info' is deprecated and will be removed in Werkzeug 2.3.", + DeprecationWarning, + stacklevel=2, + ) + + path = environ.get("PATH_INFO") + if not path: + return None + + script_name = environ.get("SCRIPT_NAME", "") + + # shift multiple leading slashes over + old_path = path + path = path.lstrip("/") + if path != old_path: + script_name += "/" * (len(old_path) - len(path)) + + if "/" not in path: + environ["PATH_INFO"] = "" + environ["SCRIPT_NAME"] = script_name + path + rv = path.encode("latin1") + else: + segment, path = path.split("/", 1) + environ["PATH_INFO"] = f"/{path}" + environ["SCRIPT_NAME"] = script_name + segment + rv = segment.encode("latin1") + + return _to_str(rv, charset, errors, allow_none_charset=True) # type: ignore + + +def peek_path_info( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> t.Optional[str]: + """Returns the next segment on the `PATH_INFO` or `None` if there + is none. Works like :func:`pop_path_info` without modifying the + environment: + + >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} + >>> peek_path_info(env) + 'a' + >>> peek_path_info(env) + 'a' + + If the `charset` is set to `None` bytes are returned. + + .. deprecated:: 2.2 + Will be removed in Werkzeug 2.3. + + .. versionadded:: 0.5 + + .. versionchanged:: 0.9 + The path is now decoded and a charset and encoding + parameter can be provided. + + :param environ: the WSGI environment that is checked. 
+ """ + warnings.warn( + "'peek_path_info' is deprecated and will be removed in Werkzeug 2.3.", + DeprecationWarning, + stacklevel=2, + ) + + segments = environ.get("PATH_INFO", "").lstrip("/").split("/", 1) + if segments: + return _to_str( # type: ignore + segments[0].encode("latin1"), charset, errors, allow_none_charset=True + ) + return None + + +def extract_path_info( + environ_or_baseurl: t.Union[str, "WSGIEnvironment"], + path_or_url: t.Union[str, _URLTuple], + charset: str = "utf-8", + errors: str = "werkzeug.url_quote", + collapse_http_schemes: bool = True, +) -> t.Optional[str]: + """Extracts the path info from the given URL (or WSGI environment) and + path. The path info returned is a string. The URLs might also be IRIs. + + If the path info could not be determined, `None` is returned. + + Some examples: + + >>> extract_path_info('http://example.com/app', '/app/hello') + '/hello' + >>> extract_path_info('http://example.com/app', + ... 'https://example.com/app/hello') + '/hello' + >>> extract_path_info('http://example.com/app', + ... 'https://example.com/app/hello', + ... collapse_http_schemes=False) is None + True + + Instead of providing a base URL you can also pass a WSGI environment. + + :param environ_or_baseurl: a WSGI environment dict, a base URL or + base IRI. This is the root of the + application. + :param path_or_url: an absolute path from the server root, a + relative path (in which case it's the path info) + or a full URL. + :param charset: the charset for byte data in URLs + :param errors: the error handling on decode + :param collapse_http_schemes: if set to `False` the algorithm does + not assume that http and https on the + same server point to the same + resource. + + .. deprecated:: 2.2 + Will be removed in Werkzeug 2.3. + + .. versionchanged:: 0.15 + The ``errors`` parameter defaults to leaving invalid bytes + quoted instead of replacing them. + + .. versionadded:: 0.6 + + """ + warnings.warn( + "'extract_path_info' is deprecated and will be removed in Werkzeug 2.3.", + DeprecationWarning, + stacklevel=2, + ) + + def _normalize_netloc(scheme: str, netloc: str) -> str: + parts = netloc.split("@", 1)[-1].split(":", 1) + port: t.Optional[str] + + if len(parts) == 2: + netloc, port = parts + if (scheme == "http" and port == "80") or ( + scheme == "https" and port == "443" + ): + port = None + else: + netloc = parts[0] + port = None + + if port is not None: + netloc += f":{port}" + + return netloc + + # make sure whatever we are working on is a IRI and parse it + path = uri_to_iri(path_or_url, charset, errors) + if isinstance(environ_or_baseurl, dict): + environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True) + base_iri = uri_to_iri(environ_or_baseurl, charset, errors) + base_scheme, base_netloc, base_path = url_parse(base_iri)[:3] + cur_scheme, cur_netloc, cur_path = url_parse(url_join(base_iri, path))[:3] + + # normalize the network location + base_netloc = _normalize_netloc(base_scheme, base_netloc) + cur_netloc = _normalize_netloc(cur_scheme, cur_netloc) + + # is that IRI even on a known HTTP scheme? + if collapse_http_schemes: + for scheme in base_scheme, cur_scheme: + if scheme not in ("http", "https"): + return None + else: + if not (base_scheme in ("http", "https") and base_scheme == cur_scheme): + return None + + # are the netlocs compatible? + if base_netloc != cur_netloc: + return None + + # are we below the application path? 
+ base_path = base_path.rstrip("/") + if not cur_path.startswith(base_path): + return None + + return f"/{cur_path[len(base_path) :].lstrip('/')}" + + +class ClosingIterator: + """The WSGI specification requires that all middlewares and gateways + respect the `close` callback of the iterable returned by the application. + Because it is useful to add another close action to a returned iterable + and adding a custom iterable is a boring task this class can be used for + that:: + + return ClosingIterator(app(environ, start_response), [cleanup_session, + cleanup_locals]) + + If there is just one close function it can be passed instead of the list. + + A closing iterator is not needed if the application uses response objects + and finishes the processing if the response is started:: + + try: + return response(environ, start_response) + finally: + cleanup_session() + cleanup_locals() + """ + + def __init__( + self, + iterable: t.Iterable[bytes], + callbacks: t.Optional[ + t.Union[t.Callable[[], None], t.Iterable[t.Callable[[], None]]] + ] = None, + ) -> None: + iterator = iter(iterable) + self._next = t.cast(t.Callable[[], bytes], partial(next, iterator)) + if callbacks is None: + callbacks = [] + elif callable(callbacks): + callbacks = [callbacks] + else: + callbacks = list(callbacks) + iterable_close = getattr(iterable, "close", None) + if iterable_close: + callbacks.insert(0, iterable_close) + self._callbacks = callbacks + + def __iter__(self) -> "ClosingIterator": + return self + + def __next__(self) -> bytes: + return self._next() + + def close(self) -> None: + for callback in self._callbacks: + callback() + + +def wrap_file( + environ: "WSGIEnvironment", file: t.IO[bytes], buffer_size: int = 8192 +) -> t.Iterable[bytes]: + """Wraps a file. This uses the WSGI server's file wrapper if available + or otherwise the generic :class:`FileWrapper`. + + .. versionadded:: 0.5 + + If the file wrapper from the WSGI server is used it's important to not + iterate over it from inside the application but to pass it through + unchanged. If you want to pass out a file wrapper inside a response + object you have to set :attr:`Response.direct_passthrough` to `True`. + + More information about file wrappers are available in :pep:`333`. + + :param file: a :class:`file`-like object with a :meth:`~file.read` method. + :param buffer_size: number of bytes for one iteration. + """ + return environ.get("wsgi.file_wrapper", FileWrapper)( # type: ignore + file, buffer_size + ) + + +class FileWrapper: + """This class can be used to convert a :class:`file`-like object into + an iterable. It yields `buffer_size` blocks until the file is fully + read. + + You should not use this class directly but rather use the + :func:`wrap_file` function that uses the WSGI server's file wrapper + support if it's available. + + .. versionadded:: 0.5 + + If you're using this object together with a :class:`Response` you have + to use the `direct_passthrough` mode. + + :param file: a :class:`file`-like object with a :meth:`~file.read` method. + :param buffer_size: number of bytes for one iteration. 
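# Illustrative usage sketch (not part of the vendored wsgi.py): a minimal WSGI
# app that streams a file with wrap_file() above, so the server's own
# ``wsgi.file_wrapper`` is used when available. The file name is hypothetical.
from werkzeug.wsgi import wrap_file

def file_app(environ, start_response):
    f = open("large-download.bin", "rb")      # hypothetical example file
    start_response("200 OK", [("Content-Type", "application/octet-stream")])
    # The returned wrapper's close() closes the file once the server is done.
    return wrap_file(environ, f, buffer_size=8192)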
+ """ + + def __init__(self, file: t.IO[bytes], buffer_size: int = 8192) -> None: + self.file = file + self.buffer_size = buffer_size + + def close(self) -> None: + if hasattr(self.file, "close"): + self.file.close() + + def seekable(self) -> bool: + if hasattr(self.file, "seekable"): + return self.file.seekable() + if hasattr(self.file, "seek"): + return True + return False + + def seek(self, *args: t.Any) -> None: + if hasattr(self.file, "seek"): + self.file.seek(*args) + + def tell(self) -> t.Optional[int]: + if hasattr(self.file, "tell"): + return self.file.tell() + return None + + def __iter__(self) -> "FileWrapper": + return self + + def __next__(self) -> bytes: + data = self.file.read(self.buffer_size) + if data: + return data + raise StopIteration() + + +class _RangeWrapper: + # private for now, but should we make it public in the future ? + + """This class can be used to convert an iterable object into + an iterable that will only yield a piece of the underlying content. + It yields blocks until the underlying stream range is fully read. + The yielded blocks will have a size that can't exceed the original + iterator defined block size, but that can be smaller. + + If you're using this object together with a :class:`Response` you have + to use the `direct_passthrough` mode. + + :param iterable: an iterable object with a :meth:`__next__` method. + :param start_byte: byte from which read will start. + :param byte_range: how many bytes to read. + """ + + def __init__( + self, + iterable: t.Union[t.Iterable[bytes], t.IO[bytes]], + start_byte: int = 0, + byte_range: t.Optional[int] = None, + ): + self.iterable = iter(iterable) + self.byte_range = byte_range + self.start_byte = start_byte + self.end_byte = None + + if byte_range is not None: + self.end_byte = start_byte + byte_range + + self.read_length = 0 + self.seekable = ( + hasattr(iterable, "seekable") and iterable.seekable() # type: ignore + ) + self.end_reached = False + + def __iter__(self) -> "_RangeWrapper": + return self + + def _next_chunk(self) -> bytes: + try: + chunk = next(self.iterable) + self.read_length += len(chunk) + return chunk + except StopIteration: + self.end_reached = True + raise + + def _first_iteration(self) -> t.Tuple[t.Optional[bytes], int]: + chunk = None + if self.seekable: + self.iterable.seek(self.start_byte) # type: ignore + self.read_length = self.iterable.tell() # type: ignore + contextual_read_length = self.read_length + else: + while self.read_length <= self.start_byte: + chunk = self._next_chunk() + if chunk is not None: + chunk = chunk[self.start_byte - self.read_length :] + contextual_read_length = self.start_byte + return chunk, contextual_read_length + + def _next(self) -> bytes: + if self.end_reached: + raise StopIteration() + chunk = None + contextual_read_length = self.read_length + if self.read_length == 0: + chunk, contextual_read_length = self._first_iteration() + if chunk is None: + chunk = self._next_chunk() + if self.end_byte is not None and self.read_length >= self.end_byte: + self.end_reached = True + return chunk[: self.end_byte - contextual_read_length] + return chunk + + def __next__(self) -> bytes: + chunk = self._next() + if chunk: + return chunk + self.end_reached = True + raise StopIteration() + + def close(self) -> None: + if hasattr(self.iterable, "close"): + self.iterable.close() # type: ignore + + +def _make_chunk_iter( + stream: t.Union[t.Iterable[bytes], t.IO[bytes]], + limit: t.Optional[int], + buffer_size: int, +) -> t.Iterator[bytes]: + """Helper for the line and 
chunk iter functions.""" + if isinstance(stream, (bytes, bytearray, str)): + raise TypeError( + "Passed a string or byte object instead of true iterator or stream." + ) + if not hasattr(stream, "read"): + for item in stream: + if item: + yield item + return + stream = t.cast(t.IO[bytes], stream) + if not isinstance(stream, LimitedStream) and limit is not None: + stream = t.cast(t.IO[bytes], LimitedStream(stream, limit)) + _read = stream.read + while True: + item = _read(buffer_size) + if not item: + break + yield item + + +def make_line_iter( + stream: t.Union[t.Iterable[bytes], t.IO[bytes]], + limit: t.Optional[int] = None, + buffer_size: int = 10 * 1024, + cap_at_buffer: bool = False, +) -> t.Iterator[bytes]: + """Safely iterates line-based over an input stream. If the input stream + is not a :class:`LimitedStream` the `limit` parameter is mandatory. + + This uses the stream's :meth:`~file.read` method internally as opposite + to the :meth:`~file.readline` method that is unsafe and can only be used + in violation of the WSGI specification. The same problem applies to the + `__iter__` function of the input stream which calls :meth:`~file.readline` + without arguments. + + If you need line-by-line processing it's strongly recommended to iterate + over the input stream using this helper function. + + .. versionchanged:: 0.8 + This function now ensures that the limit was reached. + + .. versionadded:: 0.9 + added support for iterators as input stream. + + .. versionadded:: 0.11.10 + added support for the `cap_at_buffer` parameter. + + :param stream: the stream or iterate to iterate over. + :param limit: the limit in bytes for the stream. (Usually + content length. Not necessary if the `stream` + is a :class:`LimitedStream`. + :param buffer_size: The optional buffer size. + :param cap_at_buffer: if this is set chunks are split if they are longer + than the buffer size. Internally this is implemented + that the buffer size might be exhausted by a factor + of two however. + """ + _iter = _make_chunk_iter(stream, limit, buffer_size) + + first_item = next(_iter, "") + if not first_item: + return + + s = _make_encode_wrapper(first_item) + empty = t.cast(bytes, s("")) + cr = t.cast(bytes, s("\r")) + lf = t.cast(bytes, s("\n")) + crlf = t.cast(bytes, s("\r\n")) + + _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) + + def _iter_basic_lines() -> t.Iterator[bytes]: + _join = empty.join + buffer: t.List[bytes] = [] + while True: + new_data = next(_iter, "") + if not new_data: + break + new_buf: t.List[bytes] = [] + buf_size = 0 + for item in t.cast( + t.Iterator[bytes], chain(buffer, new_data.splitlines(True)) + ): + new_buf.append(item) + buf_size += len(item) + if item and item[-1:] in crlf: + yield _join(new_buf) + new_buf = [] + elif cap_at_buffer and buf_size >= buffer_size: + rv = _join(new_buf) + while len(rv) >= buffer_size: + yield rv[:buffer_size] + rv = rv[buffer_size:] + new_buf = [rv] + buffer = new_buf + if buffer: + yield _join(buffer) + + # This hackery is necessary to merge 'foo\r' and '\n' into one item + # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. 
+ previous = empty + for item in _iter_basic_lines(): + if item == lf and previous[-1:] == cr: + previous += item + item = empty + if previous: + yield previous + previous = item + if previous: + yield previous + + +def make_chunk_iter( + stream: t.Union[t.Iterable[bytes], t.IO[bytes]], + separator: bytes, + limit: t.Optional[int] = None, + buffer_size: int = 10 * 1024, + cap_at_buffer: bool = False, +) -> t.Iterator[bytes]: + """Works like :func:`make_line_iter` but accepts a separator + which divides chunks. If you want newline based processing + you should use :func:`make_line_iter` instead as it + supports arbitrary newline markers. + + .. versionadded:: 0.8 + + .. versionadded:: 0.9 + added support for iterators as input stream. + + .. versionadded:: 0.11.10 + added support for the `cap_at_buffer` parameter. + + :param stream: the stream or iterate to iterate over. + :param separator: the separator that divides chunks. + :param limit: the limit in bytes for the stream. (Usually + content length. Not necessary if the `stream` + is otherwise already limited). + :param buffer_size: The optional buffer size. + :param cap_at_buffer: if this is set chunks are split if they are longer + than the buffer size. Internally this is implemented + that the buffer size might be exhausted by a factor + of two however. + """ + _iter = _make_chunk_iter(stream, limit, buffer_size) + + first_item = next(_iter, b"") + if not first_item: + return + + _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) + if isinstance(first_item, str): + separator = _to_str(separator) + _split = re.compile(f"({re.escape(separator)})").split + _join = "".join + else: + separator = _to_bytes(separator) + _split = re.compile(b"(" + re.escape(separator) + b")").split + _join = b"".join + + buffer: t.List[bytes] = [] + while True: + new_data = next(_iter, b"") + if not new_data: + break + chunks = _split(new_data) + new_buf: t.List[bytes] = [] + buf_size = 0 + for item in chain(buffer, chunks): + if item == separator: + yield _join(new_buf) + new_buf = [] + buf_size = 0 + else: + buf_size += len(item) + new_buf.append(item) + + if cap_at_buffer and buf_size >= buffer_size: + rv = _join(new_buf) + while len(rv) >= buffer_size: + yield rv[:buffer_size] + rv = rv[buffer_size:] + new_buf = [rv] + buf_size = len(rv) + + buffer = new_buf + if buffer: + yield _join(buffer) + + +class LimitedStream(io.IOBase): + """Wraps a stream so that it doesn't read more than n bytes. If the + stream is exhausted and the caller tries to get more bytes from it + :func:`on_exhausted` is called which by default returns an empty + string. The return value of that function is forwarded + to the reader function. So if it returns an empty string + :meth:`read` will return an empty string as well. + + The limit however must never be higher than what the stream can + output. Otherwise :meth:`readlines` will try to read past the + limit. + + .. admonition:: Note on WSGI compliance + + calls to :meth:`readline` and :meth:`readlines` are not + WSGI compliant because it passes a size argument to the + readline methods. Unfortunately the WSGI PEP is not safely + implementable without a size argument to :meth:`readline` + because there is no EOF marker in the stream. As a result + of that the use of :meth:`readline` is discouraged. + + For the same reason iterating over the :class:`LimitedStream` + is not portable. It internally calls :meth:`readline`. 
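# Illustrative usage sketch (not part of the vendored wsgi.py): safe iteration
# over a raw input stream with make_line_iter() and make_chunk_iter() above,
# passing an explicit byte limit instead of relying on readline().
import io
from werkzeug.wsgi import make_line_iter, make_chunk_iter

data = io.BytesIO(b"first\nsecond\nthird")
assert list(make_line_iter(data, limit=18)) == [b"first\n", b"second\n", b"third"]

data = io.BytesIO(b"a|b|c")
assert list(make_chunk_iter(data, separator=b"|", limit=5)) == [b"a", b"b", b"c"]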
+ + We strongly suggest using :meth:`read` only or using the + :func:`make_line_iter` which safely iterates line-based + over a WSGI input stream. + + :param stream: the stream to wrap. + :param limit: the limit for the stream, must not be longer than + what the string can provide if the stream does not + end with `EOF` (like `wsgi.input`) + """ + + def __init__(self, stream: t.IO[bytes], limit: int) -> None: + self._read = stream.read + self._readline = stream.readline + self._pos = 0 + self.limit = limit + + def __iter__(self) -> "LimitedStream": + return self + + @property + def is_exhausted(self) -> bool: + """If the stream is exhausted this attribute is `True`.""" + return self._pos >= self.limit + + def on_exhausted(self) -> bytes: + """This is called when the stream tries to read past the limit. + The return value of this function is returned from the reading + function. + """ + # Read null bytes from the stream so that we get the + # correct end of stream marker. + return self._read(0) + + def on_disconnect(self) -> bytes: + """What should happen if a disconnect is detected? The return + value of this function is returned from read functions in case + the client went away. By default a + :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised. + """ + from .exceptions import ClientDisconnected + + raise ClientDisconnected() + + def exhaust(self, chunk_size: int = 1024 * 64) -> None: + """Exhaust the stream. This consumes all the data left until the + limit is reached. + + :param chunk_size: the size for a chunk. It will read the chunk + until the stream is exhausted and throw away + the results. + """ + to_read = self.limit - self._pos + chunk = chunk_size + while to_read > 0: + chunk = min(to_read, chunk) + self.read(chunk) + to_read -= chunk + + def read(self, size: t.Optional[int] = None) -> bytes: + """Read `size` bytes or if size is not provided everything is read. + + :param size: the number of bytes read. + """ + if self._pos >= self.limit: + return self.on_exhausted() + if size is None or size == -1: # -1 is for consistence with file + size = self.limit + to_read = min(self.limit - self._pos, size) + try: + read = self._read(to_read) + except (OSError, ValueError): + return self.on_disconnect() + if to_read and len(read) != to_read: + return self.on_disconnect() + self._pos += len(read) + return read + + def readline(self, size: t.Optional[int] = None) -> bytes: + """Reads one line from the stream.""" + if self._pos >= self.limit: + return self.on_exhausted() + if size is None: + size = self.limit - self._pos + else: + size = min(size, self.limit - self._pos) + try: + line = self._readline(size) + except (ValueError, OSError): + return self.on_disconnect() + if size and not line: + return self.on_disconnect() + self._pos += len(line) + return line + + def readlines(self, size: t.Optional[int] = None) -> t.List[bytes]: + """Reads a file into a list of strings. It calls :meth:`readline` + until the file is read to the end. It does support the optional + `size` argument if the underlying stream supports it for + `readline`. + """ + last_pos = self._pos + result = [] + if size is not None: + end = min(self.limit, last_pos + size) + else: + end = self.limit + while True: + if size is not None: + size -= last_pos - self._pos + if self._pos >= end: + break + result.append(self.readline(size)) + if size is not None: + last_pos = self._pos + return result + + def tell(self) -> int: + """Returns the position of the stream. + + .. 
versionadded:: 0.9 + """ + return self._pos + + def __next__(self) -> bytes: + line = self.readline() + if not line: + raise StopIteration() + return line + + def readable(self) -> bool: + return True diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/LICENSE b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/LICENSE new file mode 120000 index 0000000..1781863 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/94/fe/ba/8020b2fb910d805aea38b0d262e6147be1650ed43572a188eeaf85378b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/METADATA new file mode 120000 index 0000000..ef371f7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/4f/ed/7c/5f1b29a19e5cfbb461cecd658d793a024215295422b0320b4dcc4c960e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/RECORD new file mode 100644 index 0000000..647d7aa --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/RECORD @@ -0,0 +1,18 @@ +whatthepatch-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +whatthepatch-1.0.2.dist-info/LICENSE,sha256=lP66gCCy-5ENgFrqOLDSYuYUe-FlDtQ1cqGI7q-FN4s,1096 +whatthepatch-1.0.2.dist-info/METADATA,sha256=T-18XxspoZ5c-7Rhzs1ljXk6AkIVKVQisDILTcxMlg4,6933 +whatthepatch-1.0.2.dist-info/RECORD,, +whatthepatch-1.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +whatthepatch-1.0.2.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +whatthepatch-1.0.2.dist-info/direct_url.json,sha256=QXvGrdWOwSEX1dbP8g48ym10TlvaggUYbKSNJ3GLloc,258 +whatthepatch-1.0.2.dist-info/top_level.txt,sha256=vJjuzH1xEc7fi9xXt09pnT-L3AvOvUc3vIlHfzuh3CQ,13 +whatthepatch/__init__.py,sha256=IEJW9S_LvHYdumagspbvukJT9qRMbfEgrq4NV83eH2o,127 +whatthepatch/__pycache__/__init__.cpython-38.pyc,, +whatthepatch/__pycache__/apply.cpython-38.pyc,, +whatthepatch/__pycache__/exceptions.cpython-38.pyc,, +whatthepatch/__pycache__/patch.cpython-38.pyc,, +whatthepatch/__pycache__/snippets.cpython-38.pyc,, +whatthepatch/apply.py,sha256=29JTqDxNMoICujwedY1jdhKzz2Nok5piZI9tIGYIk3I,3800 +whatthepatch/exceptions.py,sha256=3g40g57CJCsqQT3I-kotgOq-j01gedSte4PDXP4CwYU,768 +whatthepatch/patch.py,sha256=20zPLahbQby0PXVlKi9vA7KtAMs1fT4-6gINKqs2s-c,25938 +whatthepatch/snippets.py,sha256=5gbwd18ZZDvd-mkvd9-wEveNtqIcfZz9m6Gn-SNfTq4,1402 diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/WHEEL new file mode 120000 index 0000000..f6d70d4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/WHEEL @@ -0,0 +1 @@ 
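# Illustrative usage sketch (not part of the vendored wsgi.py): LimitedStream
# above caps reads at ``limit`` bytes and reports exhaustion, which is how the
# module avoids reading past CONTENT_LENGTH on a raw ``wsgi.input`` stream.
import io
from werkzeug.wsgi import LimitedStream

raw = io.BytesIO(b"0123456789-extra-bytes-beyond-the-declared-length")
body = LimitedStream(raw, 10)
assert body.read() == b"0123456789"   # never reads past the 10-byte limit
assert body.is_exhausted
assert body.read() == b""             # on_exhausted() yields an empty string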
+/home/runner/.cache/pip/pool/67/e9/f2/629c2b712ab17ddbb1e4c6e7fc3439db988fec9d831b72601af398c934 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/direct_url.json new file mode 100644 index 0000000..48bbdfd --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=6d920d6c328eda006f12ec05bc9eadb405957b18460520db195f9a21eb3336e1"}, "url": "https://files.pythonhosted.org/packages/fe/31/bf06fa31436e8118447bed0a428a1279d1164ebdc2d8866666033ab03ca8/whatthepatch-1.0.2-py2.py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/top_level.txt new file mode 120000 index 0000000..ed4318e --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch-1.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bc/98/ee/cc7d7111cedf8bdc57b74f699d3f8bdc0bcebd4737bc89477f3ba1dc24 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch/__init__.py b/venv/lib/python3.8/site-packages/whatthepatch/__init__.py new file mode 120000 index 0000000..8d39608 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/20/42/56/f52fcbbc761dba66a0b296efba4253f6a44c6df120aeae0d57cdde1f6a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a3b32cf Binary files /dev/null and b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/apply.cpython-38.pyc b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/apply.cpython-38.pyc new file mode 100644 index 0000000..9d49898 Binary files /dev/null and b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/apply.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..0d4afee Binary files /dev/null and b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/exceptions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/patch.cpython-38.pyc b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/patch.cpython-38.pyc new file mode 100644 index 0000000..07da73b Binary files /dev/null and b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/patch.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/snippets.cpython-38.pyc b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/snippets.cpython-38.pyc new file mode 100644 index 0000000..abfbc3f Binary files /dev/null and b/venv/lib/python3.8/site-packages/whatthepatch/__pycache__/snippets.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/whatthepatch/apply.py b/venv/lib/python3.8/site-packages/whatthepatch/apply.py new file mode 120000 index 0000000..0860077 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch/apply.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/db/d2/53/a83c4d328202ba3c1e758d637612b3cf6368939a62648f6d2066089372 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch/exceptions.py b/venv/lib/python3.8/site-packages/whatthepatch/exceptions.py new file mode 120000 index 0000000..e297fd8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch/exceptions.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/de/0e/34/839ec2242b2a413dc8fa4a2d80eabe8f4d6079d4ad7b83c35cfe02c185 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch/patch.py b/venv/lib/python3.8/site-packages/whatthepatch/patch.py new file mode 120000 index 0000000..9168609 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch/patch.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/db/4c/cf/2da85b41bcb43d75652a2f6f03b2ad00cb357d3e3eea020d2aab36b3e7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/whatthepatch/snippets.py b/venv/lib/python3.8/site-packages/whatthepatch/snippets.py new file mode 120000 index 0000000..1dcd313 --- /dev/null +++ b/venv/lib/python3.8/site-packages/whatthepatch/snippets.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/06/f0/775f19643bddfa692f77dfb012f78db6a21c7d9cfd9ba1a7f9235f4eae \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/AUTHORS b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/AUTHORS new file mode 120000 index 0000000..dca4f35 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/AUTHORS @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1a/35/91/f17972f8397d56b2aecd32cebd1a026cc372dbbf03c4ad92b9df4b702c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/LICENSE b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/LICENSE new file mode 120000 index 0000000..acbcd57 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/cf/c7/74/9b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/METADATA b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/METADATA new file mode 120000 index 0000000..e9fa980 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/METADATA @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/aa/d3/da/446af024c8b168dbbb31bb852a7aa05f4f28fc0b06411105f342c44826 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/RECORD b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/RECORD new file mode 100644 index 0000000..360c887 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/RECORD @@ -0,0 +1,118 @@ +../../../bin/yapf,sha256=sI_MGR9IPhEfBgnUH3WRbmNs-DVtHuK5iuCtOas-sRk,215 +../../../bin/yapf-diff,sha256=EKMm1-VO7zhBWgS-soTpqgbPLrMdqEPJpvjKuphJDxs,239 +yapf-0.32.0.dist-info/AUTHORS,sha256=GjWR8Xly-Dl9VrKuzTLOvRoCbMNy278DxK2Sud9LcCw,307 +yapf-0.32.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +yapf-0.32.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 
+yapf-0.32.0.dist-info/METADATA,sha256=qtPaRGrwJMixaNu7MbuFKnqgX08o_AsGQREF80LESCY,34562 +yapf-0.32.0.dist-info/RECORD,, +yapf-0.32.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +yapf-0.32.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 +yapf-0.32.0.dist-info/direct_url.json,sha256=FuvJZ5uqjYMxWbImZjMHmOyMi4fe8z2dp8bD4QEY1jg,251 +yapf-0.32.0.dist-info/entry_points.txt,sha256=fc7WCcYyJWR7Twl8ubci_UQz4gt-U9VYkXKDZODxEHA,94 +yapf-0.32.0.dist-info/top_level.txt,sha256=Y1sKm3Or1-KlBNXU8zv3IuXSJx1lAU-I_Cv_qTNq-WQ,15 +yapf/__init__.py,sha256=pKCGBYSE6mVaIkfAi2fWzQZ3cX7pcJp8a6WlUDpSiTg,12531 +yapf/__main__.py,sha256=jJ5Fcwe6tbXxqSuUxRO3uel44tHP53vVScojlus0cBM,680 +yapf/__pycache__/__init__.cpython-38.pyc,, +yapf/__pycache__/__main__.cpython-38.pyc,, +yapf/third_party/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +yapf/third_party/__pycache__/__init__.cpython-38.pyc,, +yapf/third_party/yapf_diff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +yapf/third_party/yapf_diff/__pycache__/__init__.cpython-38.pyc,, +yapf/third_party/yapf_diff/__pycache__/yapf_diff.cpython-38.pyc,, +yapf/third_party/yapf_diff/yapf_diff.py,sha256=dfci4IR32aTtJIQ8A0jiqlRyo-optVx5p_ymClqY2ng,4800 +yapf/yapflib/__init__.py,sha256=abBqhqqPP10jK6pNCEt5VDVZfYK0u0fQugTp2AC0K4k,596 +yapf/yapflib/__pycache__/__init__.cpython-38.pyc,, +yapf/yapflib/__pycache__/blank_line_calculator.cpython-38.pyc,, +yapf/yapflib/__pycache__/comment_splicer.cpython-38.pyc,, +yapf/yapflib/__pycache__/continuation_splicer.cpython-38.pyc,, +yapf/yapflib/__pycache__/errors.cpython-38.pyc,, +yapf/yapflib/__pycache__/file_resources.cpython-38.pyc,, +yapf/yapflib/__pycache__/format_decision_state.cpython-38.pyc,, +yapf/yapflib/__pycache__/format_token.cpython-38.pyc,, +yapf/yapflib/__pycache__/identify_container.cpython-38.pyc,, +yapf/yapflib/__pycache__/line_joiner.cpython-38.pyc,, +yapf/yapflib/__pycache__/logical_line.cpython-38.pyc,, +yapf/yapflib/__pycache__/object_state.cpython-38.pyc,, +yapf/yapflib/__pycache__/py3compat.cpython-38.pyc,, +yapf/yapflib/__pycache__/pytree_unwrapper.cpython-38.pyc,, +yapf/yapflib/__pycache__/pytree_utils.cpython-38.pyc,, +yapf/yapflib/__pycache__/pytree_visitor.cpython-38.pyc,, +yapf/yapflib/__pycache__/reformatter.cpython-38.pyc,, +yapf/yapflib/__pycache__/split_penalty.cpython-38.pyc,, +yapf/yapflib/__pycache__/style.cpython-38.pyc,, +yapf/yapflib/__pycache__/subtype_assigner.cpython-38.pyc,, +yapf/yapflib/__pycache__/subtypes.cpython-38.pyc,, +yapf/yapflib/__pycache__/verifier.cpython-38.pyc,, +yapf/yapflib/__pycache__/yapf_api.cpython-38.pyc,, +yapf/yapflib/blank_line_calculator.py,sha256=DyVpjQsliyScGWkxakPqQHGYG47SN0250Dh7hOTeEZI,6335 +yapf/yapflib/comment_splicer.py,sha256=UGyYERs4VZB0n5n5tbus8Noa4qNY_C8ZGSwEZKimQa8,15254 +yapf/yapflib/continuation_splicer.py,sha256=5_Npzn98SLISqopH2hB3dSTBDVVs_iuCZBd-NJTNOKc,1776 +yapf/yapflib/errors.py,sha256=kQl1Rf2kGsycdesGk9ck9DRTkB0eL65Z2ZlkdNxFaNs,1482 +yapf/yapflib/file_resources.py,sha256=pInsarnxAtx-19ubxNB9ylM0BmWksC8fw7X3WcWfMd4,9637 +yapf/yapflib/format_decision_state.py,sha256=ZShDwV4ZCM4psk8xhbmUrwNSd4F3uUeCIN1pnbXwMZQ,47987 +yapf/yapflib/format_token.py,sha256=C82rwFV9Z0LUHqcZH99jIpN1ANQJ_Kv-WZwEG1bXXCY,10875 +yapf/yapflib/identify_container.py,sha256=DQKehiPPL0NnOkeuDBwqg7bS0EdU_FDRESzbj2C35cI,2317 +yapf/yapflib/line_joiner.py,sha256=HR_jVQjR3sX7k2PcJ_d8cSFf2chKOW1-gWf2ixeZYcY,3916 +yapf/yapflib/logical_line.py,sha256=MPnPniKqjKJhiU-VIMGZ6HnJTNJHlJ2fIXgFuHcxN2o,25719 
+yapf/yapflib/object_state.py,sha256=90O2hZyCWx8SlskSsKcHiuQBAatqSrVkwWVZ5te0Hv8,8122 +yapf/yapflib/py3compat.py,sha256=r-YjKOj8ZLsh1a41cE5RZEFnQZ48CEmHILiwxmzs-DM,4270 +yapf/yapflib/pytree_unwrapper.py,sha256=wEXyeUVpLL0WhuolDwPhH97ZxagSqRvt7A_vtTOameI,14748 +yapf/yapflib/pytree_utils.py,sha256=vLHAqn5K_I9Qm-9eSLru66WiUxWesKwyT1-nbJ59IPk,10934 +yapf/yapflib/pytree_visitor.py,sha256=4k4uurmwgUH2iFE8ooMQFKb5O6tFQnAzi88IiK-WAFg,4529 +yapf/yapflib/reformatter.py,sha256=C8PSyY-CnyZ4rKb1B5VtePfIw2_SC0ojy3c8-R6i6MU,28203 +yapf/yapflib/split_penalty.py,sha256=temPk5wVNaAVLzlxzHbWrIucuPmZCVqVgepRWiLLaRI,24467 +yapf/yapflib/style.py,sha256=4TR1UHvTnUW84pIx1qrQ2HezKMwdaxYLoZhxbVTcsns,31890 +yapf/yapflib/subtype_assigner.py,sha256=OWfiqZ9JcM_TCsb14GheADyXS80nV4X1VVnS3NFu9Sk,19045 +yapf/yapflib/subtypes.py,sha256=A5vh3DAweeEu7s0Hll27ABgtVCUmo9Q_yroQd-M39Vw,1144 +yapf/yapflib/verifier.py,sha256=qT5jlM1j96jeV7YwpdCczKX8vKm51I41TE1mkLX8dkk,3042 +yapf/yapflib/yapf_api.py,sha256=gbWc_QCMWUk5EzpvcL2BEYe5vwp-2itu4WyFGoJS-yk,11878 +yapftests/__init__.py,sha256=abBqhqqPP10jK6pNCEt5VDVZfYK0u0fQugTp2AC0K4k,596 +yapftests/__pycache__/__init__.cpython-38.pyc,, +yapftests/__pycache__/blank_line_calculator_test.cpython-38.pyc,, +yapftests/__pycache__/comment_splicer_test.cpython-38.pyc,, +yapftests/__pycache__/file_resources_test.cpython-38.pyc,, +yapftests/__pycache__/format_decision_state_test.cpython-38.pyc,, +yapftests/__pycache__/format_token_test.cpython-38.pyc,, +yapftests/__pycache__/line_joiner_test.cpython-38.pyc,, +yapftests/__pycache__/logical_line_test.cpython-38.pyc,, +yapftests/__pycache__/main_test.cpython-38.pyc,, +yapftests/__pycache__/pytree_unwrapper_test.cpython-38.pyc,, +yapftests/__pycache__/pytree_utils_test.cpython-38.pyc,, +yapftests/__pycache__/pytree_visitor_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_basic_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_buganizer_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_facebook_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_pep8_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_python3_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_style_config_test.cpython-38.pyc,, +yapftests/__pycache__/reformatter_verify_test.cpython-38.pyc,, +yapftests/__pycache__/split_penalty_test.cpython-38.pyc,, +yapftests/__pycache__/style_test.cpython-38.pyc,, +yapftests/__pycache__/subtype_assigner_test.cpython-38.pyc,, +yapftests/__pycache__/utils.cpython-38.pyc,, +yapftests/__pycache__/yapf_test.cpython-38.pyc,, +yapftests/__pycache__/yapf_test_helper.cpython-38.pyc,, +yapftests/blank_line_calculator_test.py,sha256=Ir0Apdt2vtCRDpBJPXBQho6Jd74qR-IncBW4PtFP7VA,9145 +yapftests/comment_splicer_test.py,sha256=smvYTJaG99DLdK7nUcLZqCLuJxnzuqQUDmDfujGl3qg,11005 +yapftests/file_resources_test.py,sha256=5JKt0_nYXgVoXEt_Tbxy5VPlEsGnzQ8wIFN5KuEKY2M,19458 +yapftests/format_decision_state_test.py,sha256=JoMD4hZxJZF2gD7tb0YpHABmZtbdMOUNeXskbouhYYk,4398 +yapftests/format_token_test.py,sha256=9lPYuORy4ASNQr_cs_cIOJWZxfb_UnuyP44FDx3xfkA,2952 +yapftests/line_joiner_test.py,sha256=Af6Ka3wApOMHVL0-aYY7im8ChisZG5rP2WW6ZD-uhoA,2620 +yapftests/logical_line_test.py,sha256=PwR4I7A0ilUAfgfjkphtaGRHuPsPfz4ICRrym_0FUmQ,3221 +yapftests/main_test.py,sha256=sVfg9NGpM4upIkvIzMcM_-nDn5HcFPVPCDDxbBy97QU,4001 +yapftests/pytree_unwrapper_test.py,sha256=8B_hF6hegtOxU1A-Y3ZKYiHRe7ujC3EYZ8iGPaU_1Vw,9288 +yapftests/pytree_utils_test.py,sha256=5NeI1iorER5zipo_CwIsm5FBWtWqeeOjalygUMNaWas,8140 
+yapftests/pytree_visitor_test.py,sha256=ljVsmUmIDNGnOpbgoEOmYL6SV-KAIDZoNfiALbKhrP0,4068 +yapftests/reformatter_basic_test.py,sha256=0SxHfp6A_ybiJq0YkoDa6ULFJwZ7ZTpXG26plYrXeZY,109654 +yapftests/reformatter_buganizer_test.py,sha256=yeeLQciMsUv8uQXRgBc3KYaPl5ilZfnh5COdvjCeXmw,86724 +yapftests/reformatter_facebook_test.py,sha256=B1vTxN0R7aXmZIsU3DmanToR0az-BZb3t9thvXDXg1w,15205 +yapftests/reformatter_pep8_test.py,sha256=isW9Rnrg2bdpHGbX1KTzj9CSQlrrPYFygd0SrMnxM2U,33565 +yapftests/reformatter_python3_test.py,sha256=hGUKHZDxeeqLWe4RIAnV2dKISWrosBc4gBP99ywT2Sg,14355 +yapftests/reformatter_style_config_test.py,sha256=GEjtztPKKCut6Ua3n0w9xc_6QpFtsrCNv8Itrbd7NAk,7215 +yapftests/reformatter_verify_test.py,sha256=Up03fDzqZ2SKmO1bHgjGwgBtiP4Sde7Uev88O-FdOIk,3223 +yapftests/split_penalty_test.py,sha256=oMFpOlCXFxdFUL54npVVkQa3-CfYy3EowQCDbyqhYJI,7417 +yapftests/style_test.py,sha256=ODU3GMQ8-Z7UpHsCLdzqMKivOCIlvI8zhZxmCpjn3BA,11579 +yapftests/subtype_assigner_test.py,sha256=68vAPDgKfnLsSMbiBNmSc8mN5hqpRCloNB9RIlReBU4,9372 +yapftests/utils.py,sha256=kbrj8PEnvmrV7kJUEOaIrqOGapkpXWYEoCVEqW2iugM,2708 +yapftests/yapf_test.py,sha256=cn6PJ89bNzP2oQLruG30Pz6dnDfkZOXLUz6DKjUWMhg,62954 +yapftests/yapf_test_helper.py,sha256=XxW9QzUZoUNJkkIwZmR-TtHvVCsw8YAqtn0IKA8WbY8,3034 diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/WHEEL new file mode 120000 index 0000000..53c4fb9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1d/7f/af/f7e9e8524c94a31c99d4f312b92edab94c43011e1505da182ea0f4c70b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/direct_url.json new file mode 100644 index 0000000..1fef1a6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=8fea849025584e486fd06d6ba2bed717f396080fd3cc236ba10cb97c4c51cf32"}, "url": "https://files.pythonhosted.org/packages/47/88/843c2e68f18a5879b4fbf37cb99fbabe1ffc4343b2e63191c8462235c008/yapf-0.32.0-py2.py3-none-any.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/entry_points.txt new file mode 120000 index 0000000..ae429b6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/entry_points.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/7d/ce/d6/09c63225647b4f097cb9b722fd4433e20b7e53d55891728364e0f11070 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/top_level.txt new file mode 120000 index 0000000..a9c9ef7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf-0.32.0.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/63/5b/0a/9b73abd7e2a504d5d4f33bf722e5d2271d65014f88fc2bffa9336af964 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/__init__.py b/venv/lib/python3.8/site-packages/yapf/__init__.py new file mode 120000 index 0000000..bd35a06 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/__init__.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/a4/a0/86/058484ea655a2247c08b67d6cd0677717ee9709a7c6ba5a5503a528938 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/__main__.py b/venv/lib/python3.8/site-packages/yapf/__main__.py new file mode 120000 index 0000000..92890f8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/__main__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8c/9e/45/7307bab5b5f1a92b94c513b7b9e978e2d1cfe77bd549ca2396eb347013 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..25139bc Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/__pycache__/__main__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..5f58cdd Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/third_party/__init__.py b/venv/lib/python3.8/site-packages/yapf/third_party/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/third_party/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/third_party/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/third_party/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..6ab822c Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/third_party/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__init__.py b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__init__.py new file mode 120000 index 0000000..05b4099 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..fd6c6e5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__pycache__/yapf_diff.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__pycache__/yapf_diff.cpython-38.pyc new file mode 100644 index 0000000..ab1dccd Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/__pycache__/yapf_diff.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/yapf_diff.py b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/yapf_diff.py new file mode 120000 index 0000000..647d3fa --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/third_party/yapf_diff/yapf_diff.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/75/f7/22/e08477d9a4ed24843c0348e2aa5472a3ea29b55c79a7fca60a5a98da78 \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/yapf/yapflib/__init__.py b/venv/lib/python3.8/site-packages/yapf/yapflib/__init__.py new file mode 120000 index 0000000..2913850 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/69/b0/6a/86aa8f3f5d232baa4d084b795435597d82b4bb47d0ba04e9d800b42b89 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d37c97f Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/blank_line_calculator.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/blank_line_calculator.cpython-38.pyc new file mode 100644 index 0000000..27ba6a0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/blank_line_calculator.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/comment_splicer.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/comment_splicer.cpython-38.pyc new file mode 100644 index 0000000..e797572 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/comment_splicer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/continuation_splicer.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/continuation_splicer.cpython-38.pyc new file mode 100644 index 0000000..9fa023d Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/continuation_splicer.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/errors.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/errors.cpython-38.pyc new file mode 100644 index 0000000..62b59c8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/errors.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/file_resources.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/file_resources.cpython-38.pyc new file mode 100644 index 0000000..4f3eeca Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/file_resources.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/format_decision_state.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/format_decision_state.cpython-38.pyc new file mode 100644 index 0000000..3238317 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/format_decision_state.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/format_token.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/format_token.cpython-38.pyc new file mode 100644 index 0000000..bbdfd81 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/format_token.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/identify_container.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/identify_container.cpython-38.pyc new file mode 100644 index 0000000..1e678ad Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/identify_container.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/line_joiner.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/line_joiner.cpython-38.pyc new file mode 100644 index 0000000..28391ca Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/line_joiner.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/logical_line.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/logical_line.cpython-38.pyc new file mode 100644 index 0000000..759083e Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/logical_line.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/object_state.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/object_state.cpython-38.pyc new file mode 100644 index 0000000..de3fb2e Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/object_state.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/py3compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/py3compat.cpython-38.pyc new file mode 100644 index 0000000..19bf84c Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/py3compat.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_unwrapper.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_unwrapper.cpython-38.pyc new file mode 100644 index 0000000..ae59e35 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_unwrapper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_utils.cpython-38.pyc new file mode 100644 index 0000000..13550ce Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_visitor.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_visitor.cpython-38.pyc new file mode 100644 index 0000000..dd049d8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/pytree_visitor.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/reformatter.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/reformatter.cpython-38.pyc new file mode 100644 index 0000000..2cf63fa Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/reformatter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/split_penalty.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/split_penalty.cpython-38.pyc new file mode 100644 index 0000000..15b8dfc Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/split_penalty.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/style.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/style.cpython-38.pyc new file mode 100644 index 0000000..bbcdcef Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/style.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/subtype_assigner.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/subtype_assigner.cpython-38.pyc new file mode 100644 index 0000000..34c33d3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/subtype_assigner.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/subtypes.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/subtypes.cpython-38.pyc new file mode 100644 index 0000000..3aa1b3a Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/subtypes.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/verifier.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/verifier.cpython-38.pyc new file mode 100644 index 0000000..4cc223f Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/verifier.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/yapf_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/yapf_api.cpython-38.pyc new file mode 100644 index 0000000..d23d668 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapf/yapflib/__pycache__/yapf_api.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/blank_line_calculator.py b/venv/lib/python3.8/site-packages/yapf/yapflib/blank_line_calculator.py new file mode 120000 index 0000000..710388b --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/blank_line_calculator.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0f/25/69/8d0b258b249c1969316a43ea4071981b8ed2374db9d0387b84e4de1192 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/comment_splicer.py b/venv/lib/python3.8/site-packages/yapf/yapflib/comment_splicer.py new file mode 120000 index 0000000..2371d7e --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/comment_splicer.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/50/6c/98/111b385590749f99f9b5bbacf0da1ae2a358fc2f19192c0464a8a641af \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/continuation_splicer.py b/venv/lib/python3.8/site-packages/yapf/yapflib/continuation_splicer.py new file mode 120000 index 0000000..db04164 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/continuation_splicer.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e7/f3/69/ce7f7c48b212aa8a47da10777524c10d556cfe2b8264177e3494cd38a7 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/errors.py b/venv/lib/python3.8/site-packages/yapf/yapflib/errors.py new file mode 120000 index 0000000..03f279d --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/errors.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/91/09/75/45fda41acc9c75eb0693d724f43453901d1e2fae59d9996474dc4568db \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/file_resources.py b/venv/lib/python3.8/site-packages/yapf/yapflib/file_resources.py new file mode 120000 index 0000000..a6f8c47 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/file_resources.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a4/89/ec/6ab9f102dc7ed7db9bc4d07dca53340665a4b02f1fc3b5f759c59f31de \ No newline at end of file diff 
--git a/venv/lib/python3.8/site-packages/yapf/yapflib/format_decision_state.py b/venv/lib/python3.8/site-packages/yapf/yapflib/format_decision_state.py new file mode 120000 index 0000000..f7ccc0a --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/format_decision_state.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/65/28/43/c15e1908ce29b24f3185b994af0352778177b9478220dd699db5f03194 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/format_token.py b/venv/lib/python3.8/site-packages/yapf/yapflib/format_token.py new file mode 120000 index 0000000..091c586 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/format_token.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0b/cd/ab/c0557d6742d41ea7191fdf6322937500d409fcabfe599c041b56d75c26 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/identify_container.py b/venv/lib/python3.8/site-packages/yapf/yapflib/identify_container.py new file mode 120000 index 0000000..1fc2960 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/identify_container.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0d/02/9e/8623cf2f43673a47ae0c1c2a83b6d2d04754fc50d1112cdb8f60b7e5c2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/line_joiner.py b/venv/lib/python3.8/site-packages/yapf/yapflib/line_joiner.py new file mode 120000 index 0000000..1aecf18 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/line_joiner.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1d/1f/e3/5508d1dec5fb9363dc27f77c71215fd9c84a396d7e8167f68b179961c6 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/logical_line.py b/venv/lib/python3.8/site-packages/yapf/yapflib/logical_line.py new file mode 120000 index 0000000..3d1ce8b --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/logical_line.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/30/f9/cf/9e22aa8ca261894f9520c199e879c94cd247949d9f217805b87731376a \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/object_state.py b/venv/lib/python3.8/site-packages/yapf/yapflib/object_state.py new file mode 120000 index 0000000..b64e7c5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/object_state.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f7/43/b6/859c825b1f1296c912b0a7078ae40101ab6a4ab564c16559e6d7b41eff \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/py3compat.py b/venv/lib/python3.8/site-packages/yapf/yapflib/py3compat.py new file mode 120000 index 0000000..95d5df5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/py3compat.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/af/e6/23/28e8fc64bb21d5ae35704e51644167419e3c08498720b8b0c66cecf833 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_unwrapper.py b/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_unwrapper.py new file mode 120000 index 0000000..f5acb50 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_unwrapper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c0/45/f2/7945692cbd1686ea250f03e11fded9c5a812a91bedec0fefb5339a99e2 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_utils.py b/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_utils.py new file mode 120000 index 0000000..c2c2600 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_utils.py @@ -0,0 
+1 @@ +/home/runner/.cache/pip/pool/bc/b1/c0/aa7e4afc8f509bef5e48baeeeba5a253159eb0ac324f5fa76c9e7d20f9 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_visitor.py b/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_visitor.py new file mode 120000 index 0000000..99ac7e6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/pytree_visitor.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e2/4e/2e/bab9b08141f688513ca2831014a6f93bab454270338bcf0888af960058 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/reformatter.py b/venv/lib/python3.8/site-packages/yapf/yapflib/reformatter.py new file mode 120000 index 0000000..6a5d5e3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/reformatter.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/0b/c3/d2/c98f829f2678aca6f507956d78f7c8c36fd20b4a23cb773cf91ea2e8c5 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/split_penalty.py b/venv/lib/python3.8/site-packages/yapf/yapflib/split_penalty.py new file mode 120000 index 0000000..79c2d00 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/split_penalty.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b5/e9/8f/939c1535a0152f3971cc76d6ac8b9cb8f999095a9581ea515a22cb6912 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/style.py b/venv/lib/python3.8/site-packages/yapf/yapflib/style.py new file mode 120000 index 0000000..7c2ac74 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/style.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e1/34/75/507bd39d45bce29231d6aad0d877b328cc1d6b160ba198716d54dcb27b \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/subtype_assigner.py b/venv/lib/python3.8/site-packages/yapf/yapflib/subtype_assigner.py new file mode 120000 index 0000000..ca03e46 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/subtype_assigner.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/39/67/e2/a99f4970cfd30ac6f5e0685e003c974bcd275785f55559d2dcd16ef529 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/subtypes.py b/venv/lib/python3.8/site-packages/yapf/yapflib/subtypes.py new file mode 120000 index 0000000..409e619 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/subtypes.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/03/9b/e1/dc303079e12eeecd07965dbb00182d542526a3d43fcaba1077e337f55c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/verifier.py b/venv/lib/python3.8/site-packages/yapf/yapflib/verifier.py new file mode 120000 index 0000000..2f6a483 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/verifier.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a9/3e/63/94cd63f7a8de57b630a5d09ccca5fcbca9b9d48e354c4d6690b5fc7649 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapf/yapflib/yapf_api.py b/venv/lib/python3.8/site-packages/yapf/yapflib/yapf_api.py new file mode 120000 index 0000000..8561385 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapf/yapflib/yapf_api.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/81/b5/9c/fd008c594939133a6f70bd811187b9bf0a7eda2b6ee16c851a8252fb29 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/__init__.py b/venv/lib/python3.8/site-packages/yapftests/__init__.py new file mode 120000 index 0000000..2913850 --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/yapftests/__init__.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/69/b0/6a/86aa8f3f5d232baa4d084b795435597d82b4bb47d0ba04e9d800b42b89 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4cff347 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/blank_line_calculator_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/blank_line_calculator_test.cpython-38.pyc new file mode 100644 index 0000000..404c6f1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/blank_line_calculator_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/comment_splicer_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/comment_splicer_test.cpython-38.pyc new file mode 100644 index 0000000..c1a99cd Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/comment_splicer_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/file_resources_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/file_resources_test.cpython-38.pyc new file mode 100644 index 0000000..f484d67 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/file_resources_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/format_decision_state_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/format_decision_state_test.cpython-38.pyc new file mode 100644 index 0000000..982d68f Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/format_decision_state_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/format_token_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/format_token_test.cpython-38.pyc new file mode 100644 index 0000000..a8a7380 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/format_token_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/line_joiner_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/line_joiner_test.cpython-38.pyc new file mode 100644 index 0000000..1234ce6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/line_joiner_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/logical_line_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/logical_line_test.cpython-38.pyc new file mode 100644 index 0000000..39ef584 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/logical_line_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/main_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/main_test.cpython-38.pyc new file mode 100644 index 0000000..651b6cf Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/main_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_unwrapper_test.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_unwrapper_test.cpython-38.pyc new file mode 100644 index 0000000..4c4d67e Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_unwrapper_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_utils_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_utils_test.cpython-38.pyc new file mode 100644 index 0000000..761d55b Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_utils_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_visitor_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_visitor_test.cpython-38.pyc new file mode 100644 index 0000000..ca7110d Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/pytree_visitor_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_basic_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_basic_test.cpython-38.pyc new file mode 100644 index 0000000..5840cff Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_basic_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_buganizer_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_buganizer_test.cpython-38.pyc new file mode 100644 index 0000000..5727e39 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_buganizer_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_facebook_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_facebook_test.cpython-38.pyc new file mode 100644 index 0000000..8f12a8a Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_facebook_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_pep8_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_pep8_test.cpython-38.pyc new file mode 100644 index 0000000..93a62f3 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_pep8_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_python3_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_python3_test.cpython-38.pyc new file mode 100644 index 0000000..082949c Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_python3_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_style_config_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_style_config_test.cpython-38.pyc new file mode 100644 index 0000000..3df6774 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_style_config_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_verify_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_verify_test.cpython-38.pyc new file mode 100644 index 0000000..7225124 Binary files /dev/null and 
b/venv/lib/python3.8/site-packages/yapftests/__pycache__/reformatter_verify_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/split_penalty_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/split_penalty_test.cpython-38.pyc new file mode 100644 index 0000000..ae0eda0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/split_penalty_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/style_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/style_test.cpython-38.pyc new file mode 100644 index 0000000..9054093 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/style_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/subtype_assigner_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/subtype_assigner_test.cpython-38.pyc new file mode 100644 index 0000000..9e78c69 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/subtype_assigner_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..c495fd6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/yapf_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/yapf_test.cpython-38.pyc new file mode 100644 index 0000000..27ef7b7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/yapf_test.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/__pycache__/yapf_test_helper.cpython-38.pyc b/venv/lib/python3.8/site-packages/yapftests/__pycache__/yapf_test_helper.cpython-38.pyc new file mode 100644 index 0000000..14a519e Binary files /dev/null and b/venv/lib/python3.8/site-packages/yapftests/__pycache__/yapf_test_helper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yapftests/blank_line_calculator_test.py b/venv/lib/python3.8/site-packages/yapftests/blank_line_calculator_test.py new file mode 120000 index 0000000..61916cf --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/blank_line_calculator_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/22/bd/00/a5db76bed0910e90493d7050868e8977be2a47e2277015b83ed14fed50 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/comment_splicer_test.py b/venv/lib/python3.8/site-packages/yapftests/comment_splicer_test.py new file mode 120000 index 0000000..8dd6003 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/comment_splicer_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b2/6b/d8/4c9686f7d0cb74aee751c2d9a822ee2719f3baa4140e60dfba31a5dea8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/file_resources_test.py b/venv/lib/python3.8/site-packages/yapftests/file_resources_test.py new file mode 120000 index 0000000..799bbb6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/file_resources_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e4/92/ad/d3f9d85e05685c4b7f4dbc72e553e512c1a7cd0f302053792ae10a6363 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/format_decision_state_test.py 
b/venv/lib/python3.8/site-packages/yapftests/format_decision_state_test.py new file mode 120000 index 0000000..f252a5a --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/format_decision_state_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/26/83/03/e21671259176803eed6f46291c006666d6dd30e50d797b246e8ba16189 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/format_token_test.py b/venv/lib/python3.8/site-packages/yapftests/format_token_test.py new file mode 120000 index 0000000..dfaca50 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/format_token_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f6/53/d8/b8e472e0048d42bfdcb3f708389599c5f6ff527bb23f8e050f1df17e40 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/line_joiner_test.py b/venv/lib/python3.8/site-packages/yapftests/line_joiner_test.py new file mode 120000 index 0000000..e058ad0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/line_joiner_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/01/fe/8a/6b7c00a4e30754bd3e69863b8a6f02862b191b9acfd965ba643fae8680 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/logical_line_test.py b/venv/lib/python3.8/site-packages/yapftests/logical_line_test.py new file mode 120000 index 0000000..d989a0b --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/logical_line_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/3f/04/78/23b0348a55007e07e392986d686447b8fb0f7f3e08091af29bfd055264 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/main_test.py b/venv/lib/python3.8/site-packages/yapftests/main_test.py new file mode 120000 index 0000000..9d63e48 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/main_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/b1/57/e0/f4d1a9338ba9224bc8ccc70cffe9c39f91dc14f54f0830f16c1cbded05 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/pytree_unwrapper_test.py b/venv/lib/python3.8/site-packages/yapftests/pytree_unwrapper_test.py new file mode 120000 index 0000000..648ea29 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/pytree_unwrapper_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/f0/1f/e1/17a85e82d3b153503e63764a6221d17bbba30b711867c8863da53fd55c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/pytree_utils_test.py b/venv/lib/python3.8/site-packages/yapftests/pytree_utils_test.py new file mode 120000 index 0000000..30175c8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/pytree_utils_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e4/d7/88/d62a2b111e738a9a3f0b022c9b91415ad5aa79e3a36a5ca050c35a59ab \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/pytree_visitor_test.py b/venv/lib/python3.8/site-packages/yapftests/pytree_visitor_test.py new file mode 120000 index 0000000..d10d862 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/pytree_visitor_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/96/35/6c/9949880cd1a73a96e0a043a660be9257e28020366835f8802db2a1acfd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_basic_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_basic_test.py new file mode 120000 index 0000000..f1b28b3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_basic_test.py @@ -0,0 +1 @@ 
+/home/runner/.cache/pip/pool/d1/2c/47/7e9e80ff26e226ad189280dae942c527067b653a571b6ea9958ad77996 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_buganizer_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_buganizer_test.py new file mode 120000 index 0000000..36bdc4b --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_buganizer_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/c9/e7/8b/41c88cb14bfcb905d180173729868f9798a565f9e1e4239dbe309e5e6c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_facebook_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_facebook_test.py new file mode 120000 index 0000000..ad0e6c0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_facebook_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/07/5b/d3/c4dd11eda5e6648b14dc399a9d3a11d1acfe0596f7b7db61bd70d7835c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_pep8_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_pep8_test.py new file mode 120000 index 0000000..7981d51 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_pep8_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/8a/c5/bd/467ae0d9b7691c66d7d4a4f38fd092425aeb3d817281dd12acc9f13365 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_python3_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_python3_test.py new file mode 120000 index 0000000..b2eb437 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_python3_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/84/65/0a/1d90f179ea8b59ee112009d5d9d288496ae8b017388013fdf72c13d928 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_style_config_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_style_config_test.py new file mode 120000 index 0000000..f2f57f5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_style_config_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/18/48/ed/ced3ca282bade946b79f4c3dc5cffa42916db2b08dbfc22dadb77b3409 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/reformatter_verify_test.py b/venv/lib/python3.8/site-packages/yapftests/reformatter_verify_test.py new file mode 120000 index 0000000..17eb4ee --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/reformatter_verify_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/52/9d/37/7c3cea67648a98ed5b1e08c6c2006d88fe1275eed47aff3c3be15d3889 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/split_penalty_test.py b/venv/lib/python3.8/site-packages/yapftests/split_penalty_test.py new file mode 120000 index 0000000..5f5781a --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/split_penalty_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a0/c1/69/3a509717174550be789e95559106b7f827d8cb7128c100836f2aa16092 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/style_test.py b/venv/lib/python3.8/site-packages/yapftests/style_test.py new file mode 120000 index 0000000..eeb03c1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/style_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/38/35/37/18c43cf99ed4a47b022ddcea30a8af382225bc8f33859c660a98e7dc10 \ No newline at end of file diff --git 
a/venv/lib/python3.8/site-packages/yapftests/subtype_assigner_test.py b/venv/lib/python3.8/site-packages/yapftests/subtype_assigner_test.py new file mode 120000 index 0000000..6322bba --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/subtype_assigner_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/eb/cb/c0/3c380a7e72ec48c6e204d99273c98de61aa9442968341f5122545e054e \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/utils.py b/venv/lib/python3.8/site-packages/yapftests/utils.py new file mode 120000 index 0000000..9977b90 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/utils.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/91/ba/e3/f0f127be6ad5ee425410e688aea3866a99295d6604a02544a96da2ba03 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/yapf_test.py b/venv/lib/python3.8/site-packages/yapftests/yapf_test.py new file mode 120000 index 0000000..b9c1b18 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/yapf_test.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/72/7e/8f/27cf5b3733f6a102ebb86df43f3e9d9c37e464e5cb533e832a35163218 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yapftests/yapf_test_helper.py b/venv/lib/python3.8/site-packages/yapftests/yapf_test_helper.py new file mode 120000 index 0000000..db8b368 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yapftests/yapf_test_helper.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/5f/15/bd/433519a1434992423066647e4ed1ef542b30f1802ab67d08280f166d8f \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/LICENSE new file mode 120000 index 0000000..104b2af --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/56/d6/ac/6c8105c0a51304c21db060e361af9a8ea0af9a75c239c28b5d13693838 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/METADATA new file mode 100644 index 0000000..66ef349 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/METADATA @@ -0,0 +1,867 @@ +Metadata-Version: 2.1 +Name: yarl +Version: 1.8.1 +Summary: Yet another URL library +Home-page: https://github.com/aio-libs/yarl/ +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +License: Apache 2 +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: multidict (>=4.0) +Requires-Dist: idna (>=2.0) +Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8" + +yarl +==== + +.. 
image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg + :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI + :align: right + +.. image:: https://codecov.io/gh/aio-libs/yarl/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/yarl + +.. image:: https://badge.fury.io/py/yarl.svg + :target: https://badge.fury.io/py/yarl + + +.. image:: https://readthedocs.org/projects/yarl/badge/?version=latest + :target: https://yarl.readthedocs.io + + +.. image:: https://img.shields.io/pypi/pyversions/yarl.svg + :target: https://pypi.python.org/pypi/yarl + +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + +Introduction +------------ + +A URL is constructed from ``str``: + +.. code-block:: pycon + + >>> from yarl import URL + >>> url = URL('https://www.python.org/~guido?arg=1#frag') + >>> url + URL('https://www.python.org/~guido?arg=1#frag') + +All url parts: *scheme*, *user*, *password*, *host*, *port*, *path*, +*query* and *fragment* are accessible by properties: + +.. code-block:: pycon + + >>> url.scheme + 'https' + >>> url.host + 'www.python.org' + >>> url.path + '/~guido' + >>> url.query_string + 'arg=1' + >>> url.query + <MultiDict('arg': '1')> + >>> url.fragment + 'frag' + +All url manipulations produce a new url object: + +.. code-block:: pycon + + >>> url = URL('https://www.python.org') + >>> url / 'foo' / 'bar' + URL('https://www.python.org/foo/bar') + >>> url / 'foo' % {'bar': 'baz'} + URL('https://www.python.org/foo?bar=baz') + +Strings passed to constructor and modification methods are +automatically encoded giving canonical representation as result: + +.. code-block:: pycon + + >>> url = URL('https://www.python.org/путь') + >>> url + URL('https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C') + +Regular properties are *percent-decoded*, use ``raw_`` versions for +getting *encoded* strings: + +.. code-block:: pycon + + >>> url.path + '/путь' + + >>> url.raw_path + '/%D0%BF%D1%83%D1%82%D1%8C' + +Human readable representation of URL is available as ``.human_repr()``: + +.. code-block:: pycon + + >>> url.human_repr() + 'https://www.python.org/путь' + +For full documentation please read https://yarl.readthedocs.org. + + +Installation +------------ + +:: + + $ pip install yarl + +The library is Python 3 only! + +PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install +``yarl`` on another operating system (like *Alpine Linux*, which is not +manylinux-compliant because of the missing glibc and therefore, cannot be +used with our wheels) then the tarball will be used to compile the library from +the source code. It requires a C compiler and Python headers installed. + +To skip the compilation you must explicitly opt-in by setting the `YARL_NO_EXTENSIONS` +environment variable to a non-empty value, e.g.: + +.. code-block:: bash + + $ YARL_NO_EXTENSIONS=1 pip install yarl + +Please note that the pure-Python (uncompiled) version is much slower. However, +PyPy always uses a pure-Python implementation, and, as such, it is unaffected +by this variable. + +Dependencies +------------ + +YARL requires the multidict_ library. + + +API documentation +------------------ + +The documentation is located at https://yarl.readthedocs.org + + +Why isn't boolean supported by the URL query API? +------------------------------------------------- + +There is no standard way to represent boolean values in a URL query string.
+ +Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``, +``Y``/``N``, ``1``/``0``, etc. + +``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because +it is specific to how the end-user's application is built and would be different for +different apps. The library doesn't accept booleans in the API; a user should convert +bools into strings using own preferred translation protocol. + + +Comparison with other URL libraries +------------------------------------ + +* furl (https://pypi.python.org/pypi/furl) + + The library has rich functionality but the ``furl`` object is mutable. + + I'm afraid to pass this object into foreign code: who knows if the + code will modify my url in a terrible way while I just want to send URL + with handy helpers for accessing URL properties. + + ``furl`` has other non-obvious tricky things but the main objection + is mutability. + +* URLObject (https://pypi.python.org/pypi/URLObject) + + URLObject is immutable, that's pretty good. + + Every URL change generates a new URL object. + + But the library doesn't do any decode/encode transformations leaving the + end user to cope with these gory details. + + +Source code +----------- + +The project is hosted on GitHub_ + +Please file an issue on the `bug tracker +`_ if you have found a bug +or have some suggestion in order to improve the library. + +The library uses `Azure Pipelines `_ for +Continuous Integration. + +Discussion list +--------------- + +*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs + +Feel free to post your questions and ideas here. + + +Authors and License +------------------- + +The ``yarl`` package is written by Andrew Svetlov. + +It's *Apache 2* licensed and freely available. + + +.. _GitHub: https://github.com/aio-libs/yarl + +.. _multidict: https://github.com/aio-libs/multidict + + +========= +Changelog +========= + +.. + You should *NOT* be adding new change log entries to this file, this + file is managed by towncrier. You *may* edit previous change logs to + fix problems like typo corrections or such. + To add a new change log entry, please see + https://pip.pypa.io/en/latest/development/#adding-a-news-entry + we named the news folder "changes". + + WARNING: Don't drop the next directive! + +.. towncrier release notes start + +1.8.1 (2022-08-01) +================== + +Misc +---- + +- `#694 `_, `#699 `_, `#700 `_, `#701 `_, `#702 `_, `#703 `_, `#739 `_ + + +1.8.0 (2022-08-01) +================== + +Features +-------- + +- Added ``URL.raw_suffix``, ``URL.suffix``, ``URL.raw_suffixes``, ``URL.suffixes``, ``URL.with_suffix``. (`#613 `_) + + +Improved Documentation +---------------------- + +- Fixed broken internal references to ``(?P=rendered_text)``. (`#665 `_) +- Fixed broken external references to ``(?P=rendered_text)`` docs. (`#665 `_) + + +Deprecations and Removals +------------------------- + +- Dropped Python 3.6 support. (`#672 `_) + + +Misc +---- + +- `#646 `_, `#699 `_, `#701 `_ + + +1.7.2 (2021-11-01) +================== + +Bugfixes +-------- + +- Changed call in ``with_port()`` to stop reencoding parts of the URL that were already encoded. (`#623 `_) + + +1.7.1 (2021-10-07) +================== + +Bugfixes +-------- + +- Fix 1.7.0 build error + +1.7.0 (2021-10-06) +================== + +Features +-------- + +- Add `__bytes__()` magic method so that `bytes(url)` will work and use optimal ASCII encoding. (`#582 `_) +- Started shipping platform-specific arm64 wheels for Apple Silicon. 
(`#622 `_) +- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. (`#622 `_) +- Added support for Python 3.10. (`#622 `_) + + +1.6.3 (2020-11-14) +================== + +Bugfixes +-------- + +- No longer loose characters when decoding incorrect percent-sequences (like ``%e2%82%f8``). All non-decodable percent-sequences are now preserved. + `#517 `_ +- Provide x86 Windows wheels. + `#535 `_ + + +---- + + +1.6.2 (2020-10-12) +================== + + +Bugfixes +-------- + +- Provide generated ``.c`` files in TarBall distribution. + `#530 `_ + +1.6.1 (2020-10-12) +================== + +Features +-------- + +- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on + Linux as well as ``x86_64``. + `#507 `_ +- Provide wheels for Python 3.9. + `#526 `_ + +Bugfixes +-------- + +- ``human_repr()`` now always produces valid representation equivalent to the original URL (if the original URL is valid). + `#511 `_ +- Fixed requoting a single percent followed by a percent-encoded character in the Cython implementation. + `#514 `_ +- Fix ValueError when decoding ``%`` which is not followed by two hexadecimal digits. + `#516 `_ +- Fix decoding ``%`` followed by a space and hexadecimal digit. + `#520 `_ +- Fix annotation of ``with_query()``/``update_query()`` methods for ``key=[val1, val2]`` case. + `#528 `_ + +Removal +------- + +- Drop Python 3.5 support; Python 3.6 is the minimal supported Python version. + + +---- + + +1.6.0 (2020-09-23) +================== + +Features +-------- + +- Allow for int and float subclasses in query, while still denying bool. + `#492 `_ + + +Bugfixes +-------- + +- Do not requote arguments in ``URL.build()``, ``with_xxx()`` and in ``/`` operator. + `#502 `_ +- Keep IPv6 brackets in ``origin()``. + `#504 `_ + + +---- + + +1.5.1 (2020-08-01) +================== + +Bugfixes +-------- + +- Fix including relocated internal ``yarl._quoting_c`` C-extension into published PyPI dists. + `#485 `_ + + +Misc +---- + +- `#484 `_ + + +---- + + +1.5.0 (2020-07-26) +================== + +Features +-------- + +- Convert host to lowercase on URL building. + `#386 `_ +- Allow using ``mod`` operator (`%`) for updating query string (an alias for ``update_query()`` method). + `#435 `_ +- Allow use of sequences such as ``list`` and ``tuple`` in the values + of a mapping such as ``dict`` to represent that a key has many values:: + + url = URL("http://example.com") + assert url.with_query({"a": [1, 2]}) == URL("http://example.com/?a=1&a=2") + + `#443 `_ +- Support URL.build() with scheme and path (creates a relative URL). + `#464 `_ +- Cache slow IDNA encode/decode calls. + `#476 `_ +- Add ``@final`` / ``Final`` type hints + `#477 `_ +- Support URL authority/raw_authority properties and authority argument of ``URL.build()`` method. + `#478 `_ +- Hide the library implementation details, make the exposed public list very clean. + `#483 `_ + + +Bugfixes +-------- + +- Fix tests with newer Python (3.7.6, 3.8.1 and 3.9.0+). + `#409 `_ +- Fix a bug where query component, passed in a form of mapping or sequence, is unquoted in unexpected way. + `#426 `_ +- Hide `Query` and `QueryVariable` type aliases in `__init__.pyi`, now they are prefixed with underscore. + `#431 `_ +- Keep ipv6 brackets after updating port/user/password. 
+ `#451 `_ + + +---- + + +1.4.2 (2019-12-05) +================== + +Features +-------- + +- Workaround for missing `str.isascii()` in Python 3.6 + `#389 `_ + + +---- + + +1.4.1 (2019-11-29) +================== + +* Fix regression, make the library work on Python 3.5 and 3.6 again. + +1.4.0 (2019-11-29) +================== + +* Distinguish an empty password in URL from a password not provided at all (#262) + +* Fixed annotations for optional parameters of ``URL.build`` (#309) + +* Use None as default value of ``user`` parameter of ``URL.build`` (#309) + +* Enforce building C Accelerated modules when installing from source tarball, use + ``YARL_NO_EXTENSIONS`` environment variable for falling back to (slower) Pure Python + implementation (#329) + +* Drop Python 3.5 support + +* Fix quoting of plus in path by pure python version (#339) + +* Don't create a new URL if fragment is unchanged (#292) + +* Included in error msg the path that produces starting slash forbidden error (#376) + +* Skip slow IDNA encoding for ASCII-only strings (#387) + + +1.3.0 (2018-12-11) +================== + +* Fix annotations for ``query`` parameter (#207) + +* An incoming query sequence can have int variables (the same as for + Mapping type) (#208) + +* Add ``URL.explicit_port`` property (#218) + +* Give a friendlier error when port can't be converted to int (#168) + +* ``bool(URL())`` now returns ``False`` (#272) + +1.2.6 (2018-06-14) +================== + +* Drop Python 3.4 trove classifier (#205) + +1.2.5 (2018-05-23) +================== + +* Fix annotations for ``build`` (#199) + +1.2.4 (2018-05-08) +================== + +* Fix annotations for ``cached_property`` (#195) + +1.2.3 (2018-05-03) +================== + +* Accept ``str`` subclasses in ``URL`` constructor (#190) + +1.2.2 (2018-05-01) +================== + +* Fix build + +1.2.1 (2018-04-30) +================== + +* Pin minimal required Python to 3.5.3 (#189) + +1.2.0 (2018-04-30) +================== + +* Forbid inheritance, replace ``__init__`` with ``__new__`` (#171) + +* Support PEP-561 (provide type hinting marker) (#182) + +1.1.1 (2018-02-17) +================== + +* Fix performance regression: don't encode empty netloc (#170) + +1.1.0 (2018-01-21) +================== + +* Make pure Python quoter consistent with Cython version (#162) + +1.0.0 (2018-01-15) +================== + +* Use fast path if quoted string does not need requoting (#154) + +* Speed up quoting/unquoting by ``_Quoter`` and ``_Unquoter`` classes (#155) + +* Drop ``yarl.quote`` and ``yarl.unquote`` public functions (#155) + +* Add custom string writer, reuse static buffer if available (#157) + Code is 50-80 times faster than Pure Python version (was 4-5 times faster) + +* Don't recode IP zone (#144) + +* Support ``encoded=True`` in ``yarl.URL.build()`` (#158) + +* Fix updating query with multiple keys (#160) + +0.18.0 (2018-01-10) +=================== + +* Fallback to IDNA 2003 if domain name is not IDNA 2008 compatible (#152) + +0.17.0 (2017-12-30) +=================== + +* Use IDNA 2008 for domain name processing (#149) + +0.16.0 (2017-12-07) +=================== + +* Fix raising ``TypeError`` by ``url.query_string()`` after + ``url.with_query({})`` (empty mapping) (#141) + +0.15.0 (2017-11-23) +=================== + +* Add ``raw_path_qs`` attribute (#137) + +0.14.2 (2017-11-14) +=================== + +* Restore ``strict`` parameter as no-op in ``quote`` / ``unquote`` + +0.14.1 (2017-11-13) +=================== + +* Restore ``strict`` parameter as no-op for sake of compatibility 
with + aiohttp 2.2 + +0.14.0 (2017-11-11) +=================== + +* Drop strict mode (#123) + +* Fix ``"ValueError: Unallowed PCT %"`` when there's a ``"%"`` in the url (#124) + +0.13.0 (2017-10-01) +=================== + +* Document ``encoded`` parameter (#102) + +* Support relative urls like ``'?key=value'`` (#100) + +* Unsafe encoding for QS fixed. Encode ``;`` char in value param (#104) + +* Process passwords without user names (#95) + +0.12.0 (2017-06-26) +=================== + +* Properly support paths without leading slash in ``URL.with_path()`` (#90) + +* Enable type annotation checks + +0.11.0 (2017-06-26) +=================== + +* Normalize path (#86) + +* Clear query and fragment parts in ``.with_path()`` (#85) + +0.10.3 (2017-06-13) +=================== + +* Prevent double URL args unquoting (#83) + +0.10.2 (2017-05-05) +=================== + +* Unexpected hash behaviour (#75) + + +0.10.1 (2017-05-03) +=================== + +* Unexpected compare behaviour (#73) + +* Do not quote or unquote + if not a query string. (#74) + + +0.10.0 (2017-03-14) +=================== + +* Added ``URL.build`` class method (#58) + +* Added ``path_qs`` attribute (#42) + + +0.9.8 (2017-02-16) +================== + +* Do not quote ``:`` in path + + +0.9.7 (2017-02-16) +================== + +* Load from pickle without _cache (#56) + +* Percent-encoded pluses in path variables become spaces (#59) + + +0.9.6 (2017-02-15) +================== + +* Revert backward incompatible change (BaseURL) + + +0.9.5 (2017-02-14) +================== + +* Fix BaseURL rich comparison support + + +0.9.4 (2017-02-14) +================== + +* Use BaseURL + + +0.9.3 (2017-02-14) +================== + +* Added BaseURL + + +0.9.2 (2017-02-08) +================== + +* Remove debug print + + +0.9.1 (2017-02-07) +================== + +* Do not lose tail chars (#45) + + +0.9.0 (2017-02-07) +================== + +* Allow to quote ``%`` in non strict mode (#21) + +* Incorrect parsing of query parameters with %3B (;) inside (#34) + +* Fix core dumps (#41) + +* tmpbuf - compiling error (#43) + +* Added ``URL.update_path()`` method + +* Added ``URL.update_query()`` method (#47) + + +0.8.1 (2016-12-03) +================== + +* Fix broken aiohttp: revert back ``quote`` / ``unquote``. 
+ + +0.8.0 (2016-12-03) +================== + +* Support more verbose error messages in ``.with_query()`` (#24) + +* Don't percent-encode ``@`` and ``:`` in path (#32) + +* Don't expose ``yarl.quote`` and ``yarl.unquote``, these functions are + part of private API + +0.7.1 (2016-11-18) +================== + +* Accept not only ``str`` but all classes inherited from ``str`` also (#25) + +0.7.0 (2016-11-07) +================== + +* Accept ``int`` as value for ``.with_query()`` + +0.6.0 (2016-11-07) +================== + +* Explicitly use UTF8 encoding in setup.py (#20) +* Properly unquote non-UTF8 strings (#19) + +0.5.3 (2016-11-02) +================== + +* Don't use namedtuple fields but indexes on URL construction + +0.5.2 (2016-11-02) +================== + +* Inline ``_encode`` class method + +0.5.1 (2016-11-02) +================== + +* Make URL construction faster by removing extra classmethod calls + +0.5.0 (2016-11-02) +================== + +* Add cython optimization for quoting/unquoting +* Provide binary wheels + +0.4.3 (2016-09-29) +================== + +* Fix typing stubs + +0.4.2 (2016-09-29) +================== + +* Expose ``quote()`` and ``unquote()`` as public API + +0.4.1 (2016-09-28) +================== + +* Support empty values in query (``'/path?arg'``) + +0.4.0 (2016-09-27) +================== + +* Introduce ``relative()`` (#16) + +0.3.2 (2016-09-27) +================== + +* Typo fixes #15 + +0.3.1 (2016-09-26) +================== + +* Support sequence of pairs as ``with_query()`` parameter + +0.3.0 (2016-09-26) +================== + +* Introduce ``is_default_port()`` + +0.2.1 (2016-09-26) +================== + +* Raise ValueError for URLs like 'http://:8080/' + +0.2.0 (2016-09-18) +================== + +* Avoid doubling slashes when joining paths (#13) + +* Appending path starting from slash is forbidden (#12) + +0.1.4 (2016-09-09) +================== + +* Add kwargs support for ``with_query()`` (#10) + +0.1.3 (2016-09-07) +================== + +* Document ``with_query()``, ``with_fragment()`` and ``origin()`` + +* Allow ``None`` for ``with_query()`` and ``with_fragment()`` + +0.1.2 (2016-09-07) +================== + +* Fix links, tune docs theme. + +0.1.1 (2016-09-06) +================== + +* Update README, old version used obsolete API + +0.1.0 (2016-09-06) +================== + +* The library was deeply refactored, bytes are gone away but all + accepted strings are encoded if needed. + +0.0.1 (2016-08-30) +================== + +* The first release. 
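As a compact recap of the query handling described in the Introduction and the boolean note above, a minimal sketch follows (the host ``example.com`` and the query values are purely illustrative and not part of the packaged docs):

.. code-block:: pycon

    >>> from yarl import URL
    >>> # build an immutable URL object from parts
    >>> base = URL.build(scheme='https', host='example.com', path='/search')
    >>> # the % operator is an alias for update_query()
    >>> base % {'q': 'python'}
    URL('https://example.com/search?q=python')
    >>> # bool is rejected by the query API, so convert it to a string first
    >>> (base / 'results').with_query({'debug': str(True).lower()})
    URL('https://example.com/search/results?debug=true')

Note that ``with_query()`` replaces any existing query string, while ``update_query()`` (and the ``%`` operator) merges new keys into it.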
diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/RECORD new file mode 100644 index 0000000..b112359 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/RECORD @@ -0,0 +1,21 @@ +yarl-1.8.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +yarl-1.8.1.dist-info/LICENSE,sha256=VtasbIEFwKUTBMIdsGDjYa-ajqCvmnXCOcKLXRNpODg,609 +yarl-1.8.1.dist-info/METADATA,sha256=miGXpqjgTKfnHjfF_OaWDj5fSflKjCUMzCHTVW3e4ts,21168 +yarl-1.8.1.dist-info/RECORD,, +yarl-1.8.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +yarl-1.8.1.dist-info/WHEEL,sha256=-ijGDuALlPxm3HbhKntps0QzHsi-DPlXqgerYTTJkFE,148 +yarl-1.8.1.dist-info/direct_url.json,sha256=zIrDYBJbooorcXB65TMhiD8oOFhyjmzb3lBO_fYAZvI,286 +yarl-1.8.1.dist-info/top_level.txt,sha256=vf3SJuQh-k7YtvsUrV_OPOrT9Kqn0COlk7IPYyhtGkQ,5 +yarl/__init__.py,sha256=fjrukeRbTWwAxpA2u31Znyr0EBu_LDID-29qpQEGvPU,154 +yarl/__init__.pyi,sha256=ES2kbXk-0ra-aCz_VI1accDanuHfkoW964hbRfqSYeU,3883 +yarl/__pycache__/__init__.cpython-38.pyc,, +yarl/__pycache__/_quoting.cpython-38.pyc,, +yarl/__pycache__/_quoting_py.cpython-38.pyc,, +yarl/__pycache__/_url.cpython-38.pyc,, +yarl/_quoting.py,sha256=A35UGLJW4EOOHZNNocSiK7tRNGKe1NgqH3jxMoYN2x0,519 +yarl/_quoting_c.cpython-38-x86_64-linux-gnu.so,sha256=5u4vntnPPFIGCtCmb_EDLTNdVCYkOilLEgvgcyyENgA,826264 +yarl/_quoting_c.pyi,sha256=plPeUkIbXp4Hzi0AbYII4JA0H9tCjtjw-UUPU5Na1s0,447 +yarl/_quoting_c.pyx,sha256=BfSYcmmdSmffiztCvGoV8CVvkPKtvScMhL2OkVF5P1w,11498 +yarl/_quoting_py.py,sha256=K419wlfyIlGM7-UbxobWv8lOkger8Ut7rrgoJ4xDgAU,6370 +yarl/_url.py,sha256=CNyoLNCUKyqoaV-8WWTBJqd8F2HHaWWjBS-5AeKwCRs,36426 +yarl/py.typed,sha256=ay5OMO475PlcZ_Fbun9maHW7Y6MBTk0UXL4ztHx3Iug,14 diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/WHEEL new file mode 120000 index 0000000..8b6429d --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/fa/28/c6/0ee00b94fc66dc76e12a7b69b344331ec8be0cf957aa07ab6134c99051 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/direct_url.json b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/direct_url.json new file mode 100644 index 0000000..e9bee16 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/direct_url.json @@ -0,0 +1 @@ +{"archive_info": {"hash": "sha256=ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, "url": "https://files.pythonhosted.org/packages/dd/cf/5e7d506b9b5add77548252d6938380e08caf59ba808ac3cf0f0635629d1a/yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"} \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/top_level.txt new file mode 120000 index 0000000..cfb2c96 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl-1.8.1.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/bd/fd/d2/26e421fa4ed8b6fb14ad5fce3cead3f4aaa7d023a593b20f63286d1a44 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl/__init__.py b/venv/lib/python3.8/site-packages/yarl/__init__.py new file mode 100644 
index 0000000..27be88c --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/__init__.py @@ -0,0 +1,5 @@ +from ._url import URL, cache_clear, cache_configure, cache_info + +__version__ = "1.8.1" + +__all__ = ("URL", "cache_clear", "cache_configure", "cache_info") diff --git a/venv/lib/python3.8/site-packages/yarl/__init__.pyi b/venv/lib/python3.8/site-packages/yarl/__init__.pyi new file mode 100644 index 0000000..fc761b5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/__init__.pyi @@ -0,0 +1,118 @@ +import sys +from functools import _CacheInfo +from typing import Any, Mapping, Optional, Sequence, Tuple, Type, Union, overload + +import multidict + +if sys.version_info >= (3, 8): + from typing import Final, TypedDict, final +else: + from typing_extensions import Final, TypedDict, final + +_SimpleQuery = Union[str, int, float] +_QueryVariable = Union[_SimpleQuery, Sequence[_SimpleQuery]] +_Query = Union[ + None, str, Mapping[str, _QueryVariable], Sequence[Tuple[str, _QueryVariable]] +] + +@final +class URL: + scheme: Final[str] + raw_user: Final[str] + user: Final[Optional[str]] + raw_password: Final[Optional[str]] + password: Final[Optional[str]] + raw_host: Final[Optional[str]] + host: Final[Optional[str]] + port: Final[Optional[int]] + raw_authority: Final[str] + authority: Final[str] + raw_path: Final[str] + path: Final[str] + raw_query_string: Final[str] + query_string: Final[str] + path_qs: Final[str] + raw_path_qs: Final[str] + raw_fragment: Final[str] + fragment: Final[str] + query: Final[multidict.MultiDict[str]] + raw_name: Final[str] + name: Final[str] + raw_suffix: Final[str] + suffix: Final[str] + raw_suffixes: Final[Tuple[str, ...]] + suffixes: Final[Tuple[str, ...]] + raw_parts: Final[Tuple[str, ...]] + parts: Final[Tuple[str, ...]] + parent: Final[URL] + def __init__( + self, val: Union[str, "URL"] = ..., *, encoded: bool = ... + ) -> None: ... + @classmethod + def build( + cls, + *, + scheme: str = ..., + authority: str = ..., + user: Optional[str] = ..., + password: Optional[str] = ..., + host: str = ..., + port: Optional[int] = ..., + path: str = ..., + query: Optional[_Query] = ..., + query_string: str = ..., + fragment: str = ..., + encoded: bool = ... + ) -> URL: ... + def __str__(self) -> str: ... + def __repr__(self) -> str: ... + def __eq__(self, other: Any) -> bool: ... + def __le__(self, other: Any) -> bool: ... + def __lt__(self, other: Any) -> bool: ... + def __ge__(self, other: Any) -> bool: ... + def __gt__(self, other: Any) -> bool: ... + def __hash__(self) -> int: ... + def __truediv__(self, name: str) -> URL: ... + def __mod__(self, query: _Query) -> URL: ... + def is_absolute(self) -> bool: ... + def is_default_port(self) -> bool: ... + def origin(self) -> URL: ... + def relative(self) -> URL: ... + def with_scheme(self, scheme: str) -> URL: ... + def with_user(self, user: Optional[str]) -> URL: ... + def with_password(self, password: Optional[str]) -> URL: ... + def with_host(self, host: str) -> URL: ... + def with_port(self, port: Optional[int]) -> URL: ... + def with_path(self, path: str, *, encoded: bool = ...) -> URL: ... + @overload + def with_query(self, query: _Query) -> URL: ... + @overload + def with_query(self, **kwargs: _QueryVariable) -> URL: ... + @overload + def update_query(self, query: _Query) -> URL: ... + @overload + def update_query(self, **kwargs: _QueryVariable) -> URL: ... + def with_fragment(self, fragment: Optional[str]) -> URL: ... + def with_name(self, name: str) -> URL: ... 
+ def with_suffix(self, suffix: str) -> URL: ... + def join(self, url: URL) -> URL: ... + def human_repr(self) -> str: ... + # private API + @classmethod + def _normalize_path(cls, path: str) -> str: ... + +@final +class cached_property: + def __init__(self, wrapped: Any) -> None: ... + def __get__(self, inst: URL, owner: Type[URL]) -> Any: ... + def __set__(self, inst: URL, value: Any) -> None: ... + +class CacheInfo(TypedDict): + idna_encode: _CacheInfo + idna_decode: _CacheInfo + +def cache_clear() -> None: ... +def cache_info() -> CacheInfo: ... +def cache_configure( + *, idna_encode_size: Optional[int] = ..., idna_decode_size: Optional[int] = ... +) -> None: ... diff --git a/venv/lib/python3.8/site-packages/yarl/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/yarl/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..1c109ba Binary files /dev/null and b/venv/lib/python3.8/site-packages/yarl/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yarl/__pycache__/_quoting.cpython-38.pyc b/venv/lib/python3.8/site-packages/yarl/__pycache__/_quoting.cpython-38.pyc new file mode 100644 index 0000000..18b1ce9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yarl/__pycache__/_quoting.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yarl/__pycache__/_quoting_py.cpython-38.pyc b/venv/lib/python3.8/site-packages/yarl/__pycache__/_quoting_py.cpython-38.pyc new file mode 100644 index 0000000..475d547 Binary files /dev/null and b/venv/lib/python3.8/site-packages/yarl/__pycache__/_quoting_py.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yarl/__pycache__/_url.cpython-38.pyc b/venv/lib/python3.8/site-packages/yarl/__pycache__/_url.cpython-38.pyc new file mode 100644 index 0000000..2b1e1ac Binary files /dev/null and b/venv/lib/python3.8/site-packages/yarl/__pycache__/_url.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/yarl/_quoting.py b/venv/lib/python3.8/site-packages/yarl/_quoting.py new file mode 120000 index 0000000..9d633de --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/_quoting.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/03/7e/54/18b256e0438e1d934da1c4a22bbb5134629ed4d82a1f78f132860ddb1d \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl/_quoting_c.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/yarl/_quoting_c.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..69cd00e Binary files /dev/null and b/venv/lib/python3.8/site-packages/yarl/_quoting_c.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/yarl/_quoting_c.pyi b/venv/lib/python3.8/site-packages/yarl/_quoting_c.pyi new file mode 120000 index 0000000..65af32f --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/_quoting_c.pyi @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/a6/53/de/52421b5e9e07ce2d006d8208e090341fdb428ed8f0f9450f53935ad6cd \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/yarl/_quoting_c.pyx b/venv/lib/python3.8/site-packages/yarl/_quoting_c.pyx new file mode 100644 index 0000000..5335d17 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/_quoting_c.pyx @@ -0,0 +1,371 @@ +# cython: language_level=3 + +from cpython.exc cimport PyErr_NoMemory +from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc +from cpython.unicode cimport PyUnicode_DecodeASCII, PyUnicode_DecodeUTF8Stateful +from libc.stdint cimport uint8_t, uint64_t +from 
libc.string cimport memcpy, memset + +from string import ascii_letters, digits + + +cdef str GEN_DELIMS = ":/?#[]@" +cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*," +cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' +cdef str RESERVED = GEN_DELIMS + SUB_DELIMS +cdef str UNRESERVED = ascii_letters + digits + '-._~' +cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS +cdef str QS = '+&=;' + +DEF BUF_SIZE = 8 * 1024 # 8KiB +cdef char BUFFER[BUF_SIZE] + +cdef inline Py_UCS4 _to_hex(uint8_t v): + if v < 10: + return (v+0x30) # ord('0') == 0x30 + else: + return (v+0x41-10) # ord('A') == 0x41 + + +cdef inline int _from_hex(Py_UCS4 v): + if '0' <= v <= '9': + return (v) - 0x30 # ord('0') == 0x30 + elif 'A' <= v <= 'F': + return (v) - 0x41 + 10 # ord('A') == 0x41 + elif 'a' <= v <= 'f': + return (v) - 0x61 + 10 # ord('a') == 0x61 + else: + return -1 + + +cdef inline int _is_lower_hex(Py_UCS4 v): + return 'a' <= v <= 'f' + + +cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): + cdef int digit1 = _from_hex(d1) + if digit1 < 0: + return -1 + cdef int digit2 = _from_hex(d2) + if digit2 < 0: + return -1 + return (digit1 << 4 | digit2) + + +cdef uint8_t ALLOWED_TABLE[16] +cdef uint8_t ALLOWED_NOTQS_TABLE[16] + + +cdef inline bint bit_at(uint8_t array[], uint64_t ch): + return array[ch >> 3] & (1 << (ch & 7)) + + +cdef inline void set_bit(uint8_t array[], uint64_t ch): + array[ch >> 3] |= (1 << (ch & 7)) + + +memset(ALLOWED_TABLE, 0, sizeof(ALLOWED_TABLE)) +memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE)) + +for i in range(128): + if chr(i) in ALLOWED: + set_bit(ALLOWED_TABLE, i) + set_bit(ALLOWED_NOTQS_TABLE, i) + if chr(i) in QS: + set_bit(ALLOWED_NOTQS_TABLE, i) + +# ----------------- writer --------------------------- + +cdef struct Writer: + char *buf + Py_ssize_t size + Py_ssize_t pos + bint changed + + +cdef inline void _init_writer(Writer* writer): + writer.buf = &BUFFER[0] + writer.size = BUF_SIZE + writer.pos = 0 + writer.changed = 0 + + +cdef inline void _release_writer(Writer* writer): + if writer.buf != BUFFER: + PyMem_Free(writer.buf) + + +cdef inline int _write_char(Writer* writer, Py_UCS4 ch, bint changed): + cdef char * buf + cdef Py_ssize_t size + + if writer.pos == writer.size: + # reallocate + size = writer.size + BUF_SIZE + if writer.buf == BUFFER: + buf = PyMem_Malloc(size) + if buf == NULL: + PyErr_NoMemory() + return -1 + memcpy(buf, writer.buf, writer.size) + else: + buf = PyMem_Realloc(writer.buf, size) + if buf == NULL: + PyErr_NoMemory() + return -1 + writer.buf = buf + writer.size = size + writer.buf[writer.pos] = ch + writer.pos += 1 + writer.changed |= changed + return 0 + + +cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): + if _write_char(writer, '%', changed) < 0: + return -1 + if _write_char(writer, _to_hex(ch >> 4), changed) < 0: + return -1 + return _write_char(writer, _to_hex(ch & 0x0f), changed) + + +cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): + cdef uint64_t utf = symbol + + if utf < 0x80: + return _write_pct(writer, utf, True) + elif utf < 0x800: + if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: + return -1 + return _write_pct(writer, (0x80 | (utf & 0x3f)), True) + elif 0xD800 <= utf <= 0xDFFF: + # surogate pair, ignored + return 0 + elif utf < 0x10000: + if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: + return -1 + if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), + True) < 0: + return -1 + return _write_pct(writer, (0x80 | (utf & 0x3f)), True) + elif utf > 0x10FFFF: + # symbol is too large + 
return 0 + else: + if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: + return -1 + if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), + True) < 0: + return -1 + if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), + True) < 0: + return -1 + return _write_pct(writer, (0x80 | (utf & 0x3f)), True) + + +# --------------------- end writer -------------------------- + + +cdef class _Quoter: + cdef bint _qs + cdef bint _requote + + cdef uint8_t _safe_table[16] + cdef uint8_t _protected_table[16] + + def __init__( + self, *, str safe='', str protected='', bint qs=False, bint requote=True, + ): + cdef Py_UCS4 ch + + self._qs = qs + self._requote = requote + + if not self._qs: + memcpy(self._safe_table, + ALLOWED_NOTQS_TABLE, + sizeof(self._safe_table)) + else: + memcpy(self._safe_table, + ALLOWED_TABLE, + sizeof(self._safe_table)) + for ch in safe: + if ord(ch) > 127: + raise ValueError("Only safe symbols with ORD < 128 are allowed") + set_bit(self._safe_table, ch) + + memset(self._protected_table, 0, sizeof(self._protected_table)) + for ch in protected: + if ord(ch) > 127: + raise ValueError("Only safe symbols with ORD < 128 are allowed") + set_bit(self._safe_table, ch) + set_bit(self._protected_table, ch) + + def __call__(self, val): + cdef Writer writer + if val is None: + return None + if type(val) is not str: + if isinstance(val, str): + # derived from str + val = str(val) + else: + raise TypeError("Argument should be str") + _init_writer(&writer) + try: + return self._do_quote(val, &writer) + finally: + _release_writer(&writer) + + cdef str _do_quote(self, str val, Writer *writer): + cdef Py_UCS4 ch + cdef int changed + cdef int idx = 0 + cdef int length = len(val) + + while idx < length: + ch = val[idx] + idx += 1 + if ch == '%' and self._requote and idx <= length - 2: + ch = _restore_ch(val[idx], val[idx + 1]) + if ch != -1: + idx += 2 + if ch < 128: + if bit_at(self._protected_table, ch): + if _write_pct(writer, ch, True) < 0: + raise + continue + + if bit_at(self._safe_table, ch): + if _write_char(writer, ch, True) < 0: + raise + continue + + changed = (_is_lower_hex(val[idx - 2]) or + _is_lower_hex(val[idx - 1])) + if _write_pct(writer, ch, changed) < 0: + raise + continue + else: + ch = '%' + + if self._write(writer, ch) < 0: + raise + + if not writer.changed: + return val + else: + return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict") + + cdef inline int _write(self, Writer *writer, Py_UCS4 ch): + if self._qs: + if ch == ' ': + return _write_char(writer, '+', True) + + if ch < 128 and bit_at(self._safe_table, ch): + return _write_char(writer, ch, False) + + return _write_utf8(writer, ch) + + +cdef class _Unquoter: + cdef str _unsafe + cdef bint _qs + cdef _Quoter _quoter + cdef _Quoter _qs_quoter + + def __init__(self, *, unsafe='', qs=False): + self._unsafe = unsafe + self._qs = qs + self._quoter = _Quoter() + self._qs_quoter = _Quoter(qs=True) + + def __call__(self, val): + if val is None: + return None + if type(val) is not str: + if isinstance(val, str): + # derived from str + val = str(val) + else: + raise TypeError("Argument should be str") + return self._do_unquote(val) + + cdef str _do_unquote(self, str val): + if len(val) == 0: + return val + cdef list ret = [] + cdef char buffer[4] + cdef Py_ssize_t buflen = 0 + cdef Py_ssize_t consumed + cdef str unquoted + cdef Py_UCS4 ch = 0 + cdef Py_ssize_t idx = 0 + cdef Py_ssize_t length = len(val) + cdef Py_ssize_t start_pct + + while idx < length: + ch = val[idx] + idx += 1 + if ch == '%' and idx <= length - 2: + ch = 
_restore_ch(val[idx], val[idx + 1]) + if ch != -1: + idx += 2 + assert buflen < 4 + buffer[buflen] = ch + buflen += 1 + try: + unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, + NULL, &consumed) + except UnicodeDecodeError: + start_pct = idx - buflen * 3 + buffer[0] = ch + buflen = 1 + ret.append(val[start_pct : idx - 3]) + try: + unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, + NULL, &consumed) + except UnicodeDecodeError: + buflen = 0 + ret.append(val[idx - 3 : idx]) + continue + if not unquoted: + assert consumed == 0 + continue + assert consumed == buflen + buflen = 0 + if self._qs and unquoted in '+=&;': + ret.append(self._qs_quoter(unquoted)) + elif unquoted in self._unsafe: + ret.append(self._quoter(unquoted)) + else: + ret.append(unquoted) + continue + else: + ch = '%' + + if buflen: + start_pct = idx - 1 - buflen * 3 + ret.append(val[start_pct : idx - 1]) + buflen = 0 + + if ch == '+': + if not self._qs or ch in self._unsafe: + ret.append('+') + else: + ret.append(' ') + continue + + if ch in self._unsafe: + ret.append('%') + h = hex(ord(ch)).upper()[2:] + for ch in h: + ret.append(ch) + continue + + ret.append(ch) + + if buflen: + ret.append(val[length - buflen * 3 : length]) + + return ''.join(ret) diff --git a/venv/lib/python3.8/site-packages/yarl/_quoting_py.py b/venv/lib/python3.8/site-packages/yarl/_quoting_py.py new file mode 100644 index 0000000..585a1da --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/_quoting_py.py @@ -0,0 +1,197 @@ +import codecs +import re +from string import ascii_letters, ascii_lowercase, digits +from typing import Optional, cast + +BASCII_LOWERCASE = ascii_lowercase.encode("ascii") +BPCT_ALLOWED = {f"%{i:02X}".encode("ascii") for i in range(256)} +GEN_DELIMS = ":/?#[]@" +SUB_DELIMS_WITHOUT_QS = "!$'()*," +SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + "+&=;" +RESERVED = GEN_DELIMS + SUB_DELIMS +UNRESERVED = ascii_letters + digits + "-._~" +ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS + + +_IS_HEX = re.compile(b"[A-Z0-9][A-Z0-9]") +_IS_HEX_STR = re.compile("[A-Fa-f0-9][A-Fa-f0-9]") + +utf8_decoder = codecs.getincrementaldecoder("utf-8") + + +class _Quoter: + def __init__( + self, + *, + safe: str = "", + protected: str = "", + qs: bool = False, + requote: bool = True, + ) -> None: + self._safe = safe + self._protected = protected + self._qs = qs + self._requote = requote + + def __call__(self, val: Optional[str]) -> Optional[str]: + if val is None: + return None + if not isinstance(val, str): + raise TypeError("Argument should be str") + if not val: + return "" + bval = cast(str, val).encode("utf8", errors="ignore") + ret = bytearray() + pct = bytearray() + safe = self._safe + safe += ALLOWED + if not self._qs: + safe += "+&=;" + safe += self._protected + bsafe = safe.encode("ascii") + idx = 0 + while idx < len(bval): + ch = bval[idx] + idx += 1 + + if pct: + if ch in BASCII_LOWERCASE: + ch = ch - 32 # convert to uppercase + pct.append(ch) + if len(pct) == 3: # pragma: no branch # peephole optimizer + buf = pct[1:] + if not _IS_HEX.match(buf): + ret.extend(b"%25") + pct.clear() + idx -= 2 + continue + try: + unquoted = chr(int(pct[1:].decode("ascii"), base=16)) + except ValueError: + ret.extend(b"%25") + pct.clear() + idx -= 2 + continue + + if unquoted in self._protected: + ret.extend(pct) + elif unquoted in safe: + ret.append(ord(unquoted)) + else: + ret.extend(pct) + pct.clear() + + # special case, if we have only one char after "%" + elif len(pct) == 2 and idx == len(bval): + ret.extend(b"%25") + pct.clear() + idx -= 1 + + continue 
+ + elif ch == ord("%") and self._requote: + pct.clear() + pct.append(ch) + + # special case if "%" is last char + if idx == len(bval): + ret.extend(b"%25") + + continue + + if self._qs: + if ch == ord(" "): + ret.append(ord("+")) + continue + if ch in bsafe: + ret.append(ch) + continue + + ret.extend((f"%{ch:02X}").encode("ascii")) + + ret2 = ret.decode("ascii") + if ret2 == val: + return val + return ret2 + + +class _Unquoter: + def __init__(self, *, unsafe: str = "", qs: bool = False) -> None: + self._unsafe = unsafe + self._qs = qs + self._quoter = _Quoter() + self._qs_quoter = _Quoter(qs=True) + + def __call__(self, val: Optional[str]) -> Optional[str]: + if val is None: + return None + if not isinstance(val, str): + raise TypeError("Argument should be str") + if not val: + return "" + decoder = cast(codecs.BufferedIncrementalDecoder, utf8_decoder()) + ret = [] + idx = 0 + while idx < len(val): + ch = val[idx] + idx += 1 + if ch == "%" and idx <= len(val) - 2: + pct = val[idx : idx + 2] + if _IS_HEX_STR.fullmatch(pct): + b = bytes([int(pct, base=16)]) + idx += 2 + try: + unquoted = decoder.decode(b) + except UnicodeDecodeError: + start_pct = idx - 3 - len(decoder.buffer) * 3 + ret.append(val[start_pct : idx - 3]) + decoder.reset() + try: + unquoted = decoder.decode(b) + except UnicodeDecodeError: + ret.append(val[idx - 3 : idx]) + continue + if not unquoted: + continue + if self._qs and unquoted in "+=&;": + to_add = self._qs_quoter(unquoted) + if to_add is None: # pragma: no cover + raise RuntimeError("Cannot quote None") + ret.append(to_add) + elif unquoted in self._unsafe: + to_add = self._quoter(unquoted) + if to_add is None: # pragma: no cover + raise RuntimeError("Cannot quote None") + ret.append(to_add) + else: + ret.append(unquoted) + continue + + if decoder.buffer: + start_pct = idx - 1 - len(decoder.buffer) * 3 + ret.append(val[start_pct : idx - 1]) + decoder.reset() + + if ch == "+": + if not self._qs or ch in self._unsafe: + ret.append("+") + else: + ret.append(" ") + continue + + if ch in self._unsafe: + ret.append("%") + h = hex(ord(ch)).upper()[2:] + for ch in h: + ret.append(ch) + continue + + ret.append(ch) + + if decoder.buffer: + ret.append(val[-len(decoder.buffer) * 3 :]) + + ret2 = "".join(ret) + if ret2 == val: + return val + return ret2 diff --git a/venv/lib/python3.8/site-packages/yarl/_url.py b/venv/lib/python3.8/site-packages/yarl/_url.py new file mode 100644 index 0000000..dfcff2e --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/_url.py @@ -0,0 +1,1159 @@ +import functools +import math +import warnings +from collections.abc import Mapping, Sequence +from ipaddress import ip_address +from urllib.parse import SplitResult, parse_qsl, quote, urljoin, urlsplit, urlunsplit + +import idna +from multidict import MultiDict, MultiDictProxy + +from ._quoting import _Quoter, _Unquoter + +DEFAULT_PORTS = {"http": 80, "https": 443, "ws": 80, "wss": 443} + +sentinel = object() + + +def rewrite_module(obj: object) -> object: + obj.__module__ = "yarl" + return obj + + +class cached_property: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
+ + """ + + def __init__(self, wrapped): + self.wrapped = wrapped + try: + self.__doc__ = wrapped.__doc__ + except AttributeError: # pragma: no cover + self.__doc__ = "" + self.name = wrapped.__name__ + + def __get__(self, inst, owner, _sentinel=sentinel): + if inst is None: + return self + val = inst._cache.get(self.name, _sentinel) + if val is not _sentinel: + return val + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + + def __set__(self, inst, value): + raise AttributeError("cached property is read-only") + + +@rewrite_module +class URL: + # Don't derive from str + # follow pathlib.Path design + # probably URL will not suffer from pathlib problems: + # it's intended for libraries like aiohttp, + # not to be passed into standard library functions like os.open etc. + + # URL grammar (RFC 3986) + # pct-encoded = "%" HEXDIG HEXDIG + # reserved = gen-delims / sub-delims + # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" + # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" + # / "*" / "+" / "," / ";" / "=" + # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + # URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ] + # hier-part = "//" authority path-abempty + # / path-absolute + # / path-rootless + # / path-empty + # scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) + # authority = [ userinfo "@" ] host [ ":" port ] + # userinfo = *( unreserved / pct-encoded / sub-delims / ":" ) + # host = IP-literal / IPv4address / reg-name + # IP-literal = "[" ( IPv6address / IPvFuture ) "]" + # IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" ) + # IPv6address = 6( h16 ":" ) ls32 + # / "::" 5( h16 ":" ) ls32 + # / [ h16 ] "::" 4( h16 ":" ) ls32 + # / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + # / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + # / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + # / [ *4( h16 ":" ) h16 ] "::" ls32 + # / [ *5( h16 ":" ) h16 ] "::" h16 + # / [ *6( h16 ":" ) h16 ] "::" + # ls32 = ( h16 ":" h16 ) / IPv4address + # ; least-significant 32 bits of address + # h16 = 1*4HEXDIG + # ; 16 bits of address represented in hexadecimal + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + # dec-octet = DIGIT ; 0-9 + # / %x31-39 DIGIT ; 10-99 + # / "1" 2DIGIT ; 100-199 + # / "2" %x30-34 DIGIT ; 200-249 + # / "25" %x30-35 ; 250-255 + # reg-name = *( unreserved / pct-encoded / sub-delims ) + # port = *DIGIT + # path = path-abempty ; begins with "/" or is empty + # / path-absolute ; begins with "/" but not "//" + # / path-noscheme ; begins with a non-colon segment + # / path-rootless ; begins with a segment + # / path-empty ; zero characters + # path-abempty = *( "/" segment ) + # path-absolute = "/" [ segment-nz *( "/" segment ) ] + # path-noscheme = segment-nz-nc *( "/" segment ) + # path-rootless = segment-nz *( "/" segment ) + # path-empty = 0 + # segment = *pchar + # segment-nz = 1*pchar + # segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" ) + # ; non-zero-length segment without any colon ":" + # pchar = unreserved / pct-encoded / sub-delims / ":" / "@" + # query = *( pchar / "/" / "?" ) + # fragment = *( pchar / "/" / "?" ) + # URI-reference = URI / relative-ref + # relative-ref = relative-part [ "?" query ] [ "#" fragment ] + # relative-part = "//" authority path-abempty + # / path-absolute + # / path-noscheme + # / path-empty + # absolute-URI = scheme ":" hier-part [ "?" 
query ] + __slots__ = ("_cache", "_val") + + _QUOTER = _Quoter(requote=False) + _REQUOTER = _Quoter() + _PATH_QUOTER = _Quoter(safe="@:", protected="/+", requote=False) + _PATH_REQUOTER = _Quoter(safe="@:", protected="/+") + _QUERY_QUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True, requote=False) + _QUERY_REQUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True) + _QUERY_PART_QUOTER = _Quoter(safe="?/:@", qs=True, requote=False) + _FRAGMENT_QUOTER = _Quoter(safe="?/:@", requote=False) + _FRAGMENT_REQUOTER = _Quoter(safe="?/:@") + + _UNQUOTER = _Unquoter() + _PATH_UNQUOTER = _Unquoter(unsafe="+") + _QS_UNQUOTER = _Unquoter(qs=True) + + def __new__(cls, val="", *, encoded=False, strict=None): + if strict is not None: # pragma: no cover + warnings.warn("strict parameter is ignored") + if type(val) is cls: + return val + if type(val) is str: + val = urlsplit(val) + elif type(val) is SplitResult: + if not encoded: + raise ValueError("Cannot apply decoding to SplitResult") + elif isinstance(val, str): + val = urlsplit(str(val)) + else: + raise TypeError("Constructor parameter should be str") + + if not encoded: + if not val[1]: # netloc + netloc = "" + host = "" + else: + host = val.hostname + if host is None: + raise ValueError("Invalid URL: host is required for absolute urls") + + try: + port = val.port + except ValueError as e: + raise ValueError( + "Invalid URL: port can't be converted to integer" + ) from e + + netloc = cls._make_netloc( + val.username, val.password, host, port, encode=True, requote=True + ) + path = cls._PATH_REQUOTER(val[2]) + if netloc: + path = cls._normalize_path(path) + + cls._validate_authority_uri_abs_path(host=host, path=path) + query = cls._QUERY_REQUOTER(val[3]) + fragment = cls._FRAGMENT_REQUOTER(val[4]) + val = SplitResult(val[0], netloc, path, query, fragment) + + self = object.__new__(cls) + self._val = val + self._cache = {} + return self + + @classmethod + def build( + cls, + *, + scheme="", + authority="", + user=None, + password=None, + host="", + port=None, + path="", + query=None, + query_string="", + fragment="", + encoded=False, + ): + """Creates and returns a new URL""" + + if authority and (user or password or host or port): + raise ValueError( + 'Can\'t mix "authority" with "user", "password", "host" or "port".' + ) + if port and not host: + raise ValueError('Can\'t build URL with "port" but without "host".') + if query and query_string: + raise ValueError('Only one of "query" or "query_string" should be passed') + if ( + scheme is None + or authority is None + or path is None + or query_string is None + or fragment is None + ): + raise TypeError( + 'NoneType is illegal for "scheme", "authority", "path", ' + '"query_string", and "fragment" args, use empty string instead.' 
+ ) + + if authority: + if encoded: + netloc = authority + else: + tmp = SplitResult("", authority, "", "", "") + netloc = cls._make_netloc( + tmp.username, tmp.password, tmp.hostname, tmp.port, encode=True + ) + elif not user and not password and not host and not port: + netloc = "" + else: + netloc = cls._make_netloc( + user, password, host, port, encode=not encoded, encode_host=not encoded + ) + if not encoded: + path = cls._PATH_QUOTER(path) + if netloc: + path = cls._normalize_path(path) + + cls._validate_authority_uri_abs_path(host=host, path=path) + query_string = cls._QUERY_QUOTER(query_string) + fragment = cls._FRAGMENT_QUOTER(fragment) + + url = cls( + SplitResult(scheme, netloc, path, query_string, fragment), encoded=True + ) + + if query: + return url.with_query(query) + else: + return url + + def __init_subclass__(cls): + raise TypeError(f"Inheriting a class {cls!r} from URL is forbidden") + + def __str__(self): + val = self._val + if not val.path and self.is_absolute() and (val.query or val.fragment): + val = val._replace(path="/") + return urlunsplit(val) + + def __repr__(self): + return f"{self.__class__.__name__}('{str(self)}')" + + def __bytes__(self): + return str(self).encode("ascii") + + def __eq__(self, other): + if not type(other) is URL: + return NotImplemented + + val1 = self._val + if not val1.path and self.is_absolute(): + val1 = val1._replace(path="/") + + val2 = other._val + if not val2.path and other.is_absolute(): + val2 = val2._replace(path="/") + + return val1 == val2 + + def __hash__(self): + ret = self._cache.get("hash") + if ret is None: + val = self._val + if not val.path and self.is_absolute(): + val = val._replace(path="/") + ret = self._cache["hash"] = hash(val) + return ret + + def __le__(self, other): + if not type(other) is URL: + return NotImplemented + return self._val <= other._val + + def __lt__(self, other): + if not type(other) is URL: + return NotImplemented + return self._val < other._val + + def __ge__(self, other): + if not type(other) is URL: + return NotImplemented + return self._val >= other._val + + def __gt__(self, other): + if not type(other) is URL: + return NotImplemented + return self._val > other._val + + def __truediv__(self, name): + name = self._PATH_QUOTER(name) + if name.startswith("/"): + raise ValueError( + f"Appending path {name!r} starting from slash is forbidden" + ) + path = self._val.path + if path == "/": + new_path = "/" + name + elif not path and not self.is_absolute(): + new_path = name + else: + parts = path.rstrip("/").split("/") + parts.append(name) + new_path = "/".join(parts) + if self.is_absolute(): + new_path = self._normalize_path(new_path) + return URL( + self._val._replace(path=new_path, query="", fragment=""), encoded=True + ) + + def __mod__(self, query): + return self.update_query(query) + + def __bool__(self) -> bool: + return bool( + self._val.netloc or self._val.path or self._val.query or self._val.fragment + ) + + def __getstate__(self): + return (self._val,) + + def __setstate__(self, state): + if state[0] is None and isinstance(state[1], dict): + # default style pickle + self._val = state[1]["_val"] + else: + self._val, *unused = state + self._cache = {} + + def is_absolute(self): + """A check for absolute URLs. + + Return True for absolute ones (having scheme or starting + with //), False otherwise. + + """ + return self.raw_host is not None + + def is_default_port(self): + """A check for default port. + + Return True if port is default for specified scheme, + e.g. 
'http://python.org' or 'http://python.org:80', False + otherwise. + + """ + if self.port is None: + return False + default = DEFAULT_PORTS.get(self.scheme) + if default is None: + return False + return self.port == default + + def origin(self): + """Return an URL with scheme, host and port parts only. + + user, password, path, query and fragment are removed. + + """ + # TODO: add a keyword-only option for keeping user/pass maybe? + if not self.is_absolute(): + raise ValueError("URL should be absolute") + if not self._val.scheme: + raise ValueError("URL should have scheme") + v = self._val + netloc = self._make_netloc(None, None, v.hostname, v.port) + val = v._replace(netloc=netloc, path="", query="", fragment="") + return URL(val, encoded=True) + + def relative(self): + """Return a relative part of the URL. + + scheme, user, password, host and port are removed. + + """ + if not self.is_absolute(): + raise ValueError("URL should be absolute") + val = self._val._replace(scheme="", netloc="") + return URL(val, encoded=True) + + @property + def scheme(self): + """Scheme for absolute URLs. + + Empty string for relative URLs or URLs starting with // + + """ + return self._val.scheme + + @property + def raw_authority(self): + """Encoded authority part of URL. + + Empty string for relative URLs. + + """ + return self._val.netloc + + @cached_property + def authority(self): + """Decoded authority part of URL. + + Empty string for relative URLs. + + """ + return self._make_netloc( + self.user, self.password, self.host, self.port, encode_host=False + ) + + @property + def raw_user(self): + """Encoded user part of URL. + + None if user is missing. + + """ + # not .username + ret = self._val.username + if not ret: + return None + return ret + + @cached_property + def user(self): + """Decoded user part of URL. + + None if user is missing. + + """ + return self._UNQUOTER(self.raw_user) + + @property + def raw_password(self): + """Encoded password part of URL. + + None if password is missing. + + """ + return self._val.password + + @cached_property + def password(self): + """Decoded password part of URL. + + None if password is missing. + + """ + return self._UNQUOTER(self.raw_password) + + @property + def raw_host(self): + """Encoded host part of URL. + + None for relative URLs. + + """ + # Use host instead of hostname for sake of shortness + # May add .hostname prop later + return self._val.hostname + + @cached_property + def host(self): + """Decoded host part of URL. + + None for relative URLs. + + """ + raw = self.raw_host + if raw is None: + return None + if "%" in raw: + # Hack for scoped IPv6 addresses like + # fe80::2%Проверка + # presence of '%' sign means only IPv6 address, so idna is useless. + return raw + return _idna_decode(raw) + + @property + def port(self): + """Port part of URL, with scheme-based fallback. + + None for relative URLs or URLs without explicit port and + scheme without default port substitution. + + """ + return self._val.port or DEFAULT_PORTS.get(self._val.scheme) + + @property + def explicit_port(self): + """Port part of URL, without scheme-based fallback. + + None for relative URLs or URLs without explicit port. + + """ + return self._val.port + + @property + def raw_path(self): + """Encoded path of URL. + + / for absolute URLs without path part. + + """ + ret = self._val.path + if not ret and self.is_absolute(): + ret = "/" + return ret + + @cached_property + def path(self): + """Decoded path of URL. + + / for absolute URLs without path part. 
+ + """ + return self._PATH_UNQUOTER(self.raw_path) + + @cached_property + def query(self): + """A MultiDictProxy representing parsed query parameters in decoded + representation. + + Empty value if URL has no query part. + + """ + ret = MultiDict(parse_qsl(self.raw_query_string, keep_blank_values=True)) + return MultiDictProxy(ret) + + @property + def raw_query_string(self): + """Encoded query part of URL. + + Empty string if query is missing. + + """ + return self._val.query + + @cached_property + def query_string(self): + """Decoded query part of URL. + + Empty string if query is missing. + + """ + return self._QS_UNQUOTER(self.raw_query_string) + + @cached_property + def path_qs(self): + """Decoded path of URL with query.""" + if not self.query_string: + return self.path + return f"{self.path}?{self.query_string}" + + @cached_property + def raw_path_qs(self): + """Encoded path of URL with query.""" + if not self.raw_query_string: + return self.raw_path + return f"{self.raw_path}?{self.raw_query_string}" + + @property + def raw_fragment(self): + """Encoded fragment part of URL. + + Empty string if fragment is missing. + + """ + return self._val.fragment + + @cached_property + def fragment(self): + """Decoded fragment part of URL. + + Empty string if fragment is missing. + + """ + return self._UNQUOTER(self.raw_fragment) + + @cached_property + def raw_parts(self): + """A tuple containing encoded *path* parts. + + ('/',) for absolute URLs if *path* is missing. + + """ + path = self._val.path + if self.is_absolute(): + if not path: + parts = ["/"] + else: + parts = ["/"] + path[1:].split("/") + else: + if path.startswith("/"): + parts = ["/"] + path[1:].split("/") + else: + parts = path.split("/") + return tuple(parts) + + @cached_property + def parts(self): + """A tuple containing decoded *path* parts. + + ('/',) for absolute URLs if *path* is missing. + + """ + return tuple(self._UNQUOTER(part) for part in self.raw_parts) + + @cached_property + def parent(self): + """A new URL with last part of path removed and cleaned up query and + fragment. + + """ + path = self.raw_path + if not path or path == "/": + if self.raw_fragment or self.raw_query_string: + return URL(self._val._replace(query="", fragment=""), encoded=True) + return self + parts = path.split("/") + val = self._val._replace(path="/".join(parts[:-1]), query="", fragment="") + return URL(val, encoded=True) + + @cached_property + def raw_name(self): + """The last part of raw_parts.""" + parts = self.raw_parts + if self.is_absolute(): + parts = parts[1:] + if not parts: + return "" + else: + return parts[-1] + else: + return parts[-1] + + @cached_property + def name(self): + """The last part of parts.""" + return self._UNQUOTER(self.raw_name) + + @cached_property + def raw_suffix(self): + name = self.raw_name + i = name.rfind(".") + if 0 < i < len(name) - 1: + return name[i:] + else: + return "" + + @cached_property + def suffix(self): + return self._UNQUOTER(self.raw_suffix) + + @cached_property + def raw_suffixes(self): + name = self.raw_name + if name.endswith("."): + return () + name = name.lstrip(".") + return tuple("." + suffix for suffix in name.split(".")[1:]) + + @cached_property + def suffixes(self): + return tuple(self._UNQUOTER(suffix) for suffix in self.raw_suffixes) + + @staticmethod + def _validate_authority_uri_abs_path(host, path): + """Ensure that path in URL with authority starts with a leading slash. + + Raise ValueError if not. 
+ """ + if len(host) > 0 and len(path) > 0 and not path.startswith("/"): + raise ValueError( + "Path in a URL with authority should start with a slash ('/') if set" + ) + + @classmethod + def _normalize_path(cls, path): + # Drop '.' and '..' from path + + segments = path.split("/") + resolved_path = [] + + for seg in segments: + if seg == "..": + try: + resolved_path.pop() + except IndexError: + # ignore any .. segments that would otherwise cause an + # IndexError when popped from resolved_path if + # resolving for rfc3986 + pass + elif seg == ".": + continue + else: + resolved_path.append(seg) + + if segments[-1] in (".", ".."): + # do some post-processing here. + # if the last segment was a relative dir, + # then we need to append the trailing '/' + resolved_path.append("") + + return "/".join(resolved_path) + + @classmethod + def _encode_host(cls, host, human=False): + try: + ip, sep, zone = host.partition("%") + ip = ip_address(ip) + except ValueError: + host = host.lower() + # IDNA encoding is slow, + # skip it for ASCII-only strings + # Don't move the check into _idna_encode() helper + # to reduce the cache size + if human or host.isascii(): + return host + host = _idna_encode(host) + else: + host = ip.compressed + if sep: + host += "%" + zone + if ip.version == 6: + host = "[" + host + "]" + return host + + @classmethod + def _make_netloc( + cls, user, password, host, port, encode=False, encode_host=True, requote=False + ): + quoter = cls._REQUOTER if requote else cls._QUOTER + if encode_host: + ret = cls._encode_host(host) + else: + ret = host + if port: + ret = ret + ":" + str(port) + if password is not None: + if not user: + user = "" + else: + if encode: + user = quoter(user) + if encode: + password = quoter(password) + user = user + ":" + password + elif user and encode: + user = quoter(user) + if user: + ret = user + "@" + ret + return ret + + def with_scheme(self, scheme): + """Return a new URL with scheme replaced.""" + # N.B. doesn't cleanup query/fragment + if not isinstance(scheme, str): + raise TypeError("Invalid scheme type") + if not self.is_absolute(): + raise ValueError("scheme replacement is not allowed for relative URLs") + return URL(self._val._replace(scheme=scheme.lower()), encoded=True) + + def with_user(self, user): + """Return a new URL with user replaced. + + Autoencode user if needed. + + Clear user/password if user is None. + + """ + # N.B. doesn't cleanup query/fragment + val = self._val + if user is None: + password = None + elif isinstance(user, str): + user = self._QUOTER(user) + password = val.password + else: + raise TypeError("Invalid user type") + if not self.is_absolute(): + raise ValueError("user replacement is not allowed for relative URLs") + return URL( + self._val._replace( + netloc=self._make_netloc(user, password, val.hostname, val.port) + ), + encoded=True, + ) + + def with_password(self, password): + """Return a new URL with password replaced. + + Autoencode password if needed. + + Clear password if argument is None. + + """ + # N.B. doesn't cleanup query/fragment + if password is None: + pass + elif isinstance(password, str): + password = self._QUOTER(password) + else: + raise TypeError("Invalid password type") + if not self.is_absolute(): + raise ValueError("password replacement is not allowed for relative URLs") + val = self._val + return URL( + self._val._replace( + netloc=self._make_netloc(val.username, password, val.hostname, val.port) + ), + encoded=True, + ) + + def with_host(self, host): + """Return a new URL with host replaced. 
+ + Autoencode host if needed. + + Changing host for relative URLs is not allowed, use .join() + instead. + + """ + # N.B. doesn't cleanup query/fragment + if not isinstance(host, str): + raise TypeError("Invalid host type") + if not self.is_absolute(): + raise ValueError("host replacement is not allowed for relative URLs") + if not host: + raise ValueError("host removing is not allowed") + val = self._val + return URL( + self._val._replace( + netloc=self._make_netloc(val.username, val.password, host, val.port) + ), + encoded=True, + ) + + def with_port(self, port): + """Return a new URL with port replaced. + + Clear port to default if None is passed. + + """ + # N.B. doesn't cleanup query/fragment + if port is not None and not isinstance(port, int): + raise TypeError(f"port should be int or None, got {type(port)}") + if not self.is_absolute(): + raise ValueError("port replacement is not allowed for relative URLs") + val = self._val + return URL( + self._val._replace( + netloc=self._make_netloc(val.username, val.password, val.hostname, port) + ), + encoded=True, + ) + + def with_path(self, path, *, encoded=False): + """Return a new URL with path replaced.""" + if not encoded: + path = self._PATH_QUOTER(path) + if self.is_absolute(): + path = self._normalize_path(path) + if len(path) > 0 and path[0] != "/": + path = "/" + path + return URL(self._val._replace(path=path, query="", fragment=""), encoded=True) + + @classmethod + def _query_seq_pairs(cls, quoter, pairs): + for key, val in pairs: + if isinstance(val, (list, tuple)): + for v in val: + yield quoter(key) + "=" + quoter(cls._query_var(v)) + else: + yield quoter(key) + "=" + quoter(cls._query_var(val)) + + @staticmethod + def _query_var(v): + cls = type(v) + if issubclass(cls, str): + return v + if issubclass(cls, float): + if math.isinf(v): + raise ValueError("float('inf') is not supported") + if math.isnan(v): + raise ValueError("float('nan') is not supported") + return str(float(v)) + if issubclass(cls, int) and cls is not bool: + return str(int(v)) + raise TypeError( + "Invalid variable type: value " + "should be str, int or float, got {!r} " + "of type {}".format(v, cls) + ) + + def _get_str_query(self, *args, **kwargs): + if kwargs: + if len(args) > 0: + raise ValueError( + "Either kwargs or single query parameter must be present" + ) + query = kwargs + elif len(args) == 1: + query = args[0] + else: + raise ValueError("Either kwargs or single query parameter must be present") + + if query is None: + query = "" + elif isinstance(query, Mapping): + quoter = self._QUERY_PART_QUOTER + query = "&".join(self._query_seq_pairs(quoter, query.items())) + elif isinstance(query, str): + query = self._QUERY_QUOTER(query) + elif isinstance(query, (bytes, bytearray, memoryview)): + raise TypeError( + "Invalid query type: bytes, bytearray and memoryview are forbidden" + ) + elif isinstance(query, Sequence): + quoter = self._QUERY_PART_QUOTER + # We don't expect sequence values if we're given a list of pairs + # already; only mappings like builtin `dict` which can't have the + # same key pointing to multiple values are allowed to use + # `_query_seq_pairs`. + query = "&".join( + quoter(k) + "=" + quoter(self._query_var(v)) for k, v in query + ) + else: + raise TypeError( + "Invalid query type: only str, mapping or " + "sequence of (key, value) pairs is allowed" + ) + + return query + + def with_query(self, *args, **kwargs): + """Return a new URL with query part replaced. + + Accepts any Mapping (e.g. 
dict, multidict.MultiDict instances) + or str, autoencode the argument if needed. + + A sequence of (key, value) pairs is supported as well. + + It also can take an arbitrary number of keyword arguments. + + Clear query if None is passed. + + """ + # N.B. doesn't cleanup query/fragment + + new_query = self._get_str_query(*args, **kwargs) + return URL( + self._val._replace(path=self._val.path, query=new_query), encoded=True + ) + + def update_query(self, *args, **kwargs): + """Return a new URL with query part updated.""" + s = self._get_str_query(*args, **kwargs) + new_query = MultiDict(parse_qsl(s, keep_blank_values=True)) + query = MultiDict(self.query) + query.update(new_query) + + return URL(self._val._replace(query=self._get_str_query(query)), encoded=True) + + def with_fragment(self, fragment): + """Return a new URL with fragment replaced. + + Autoencode fragment if needed. + + Clear fragment to default if None is passed. + + """ + # N.B. doesn't cleanup query/fragment + if fragment is None: + raw_fragment = "" + elif not isinstance(fragment, str): + raise TypeError("Invalid fragment type") + else: + raw_fragment = self._FRAGMENT_QUOTER(fragment) + if self.raw_fragment == raw_fragment: + return self + return URL(self._val._replace(fragment=raw_fragment), encoded=True) + + def with_name(self, name): + """Return a new URL with name (last part of path) replaced. + + Query and fragment parts are cleaned up. + + Name is encoded if needed. + + """ + # N.B. DOES cleanup query/fragment + if not isinstance(name, str): + raise TypeError("Invalid name type") + if "/" in name: + raise ValueError("Slash in name is not allowed") + name = self._PATH_QUOTER(name) + if name in (".", ".."): + raise ValueError(". and .. values are forbidden") + parts = list(self.raw_parts) + if self.is_absolute(): + if len(parts) == 1: + parts.append(name) + else: + parts[-1] = name + parts[0] = "" # replace leading '/' + else: + parts[-1] = name + if parts[0] == "/": + parts[0] = "" # replace leading '/' + return URL( + self._val._replace(path="/".join(parts), query="", fragment=""), + encoded=True, + ) + + def with_suffix(self, suffix): + """Return a new URL with suffix (file extension of name) replaced. + + Query and fragment parts are cleaned up. + + suffix is encoded if needed. + """ + if not isinstance(suffix, str): + raise TypeError("Invalid suffix type") + if suffix and not suffix.startswith(".") or suffix == ".": + raise ValueError(f"Invalid suffix {suffix!r}") + name = self.raw_name + if not name: + raise ValueError(f"{self!r} has an empty name") + old_suffix = self.raw_suffix + if not old_suffix: + name = name + suffix + else: + name = name[: -len(old_suffix)] + suffix + return self.with_name(name) + + def join(self, url): + """Join URLs + + Construct a full (“absolute”) URL by combining a “base URL” + (self) with another URL (url). + + Informally, this uses components of the base URL, in + particular the addressing scheme, the network location and + (part of) the path, to provide missing components in the + relative URL. 
+ + """ + # See docs for urllib.parse.urljoin + if not isinstance(url, URL): + raise TypeError("url should be URL") + return URL(urljoin(str(self), str(url)), encoded=True) + + def human_repr(self): + """Return decoded human readable string for URL representation.""" + user = _human_quote(self.user, "#/:?@") + password = _human_quote(self.password, "#/:?@") + host = self.host + if host: + host = self._encode_host(self.host, human=True) + path = _human_quote(self.path, "#?") + query_string = "&".join( + "{}={}".format(_human_quote(k, "#&+;="), _human_quote(v, "#&+;=")) + for k, v in self.query.items() + ) + fragment = _human_quote(self.fragment, "") + return urlunsplit( + SplitResult( + self.scheme, + self._make_netloc( + user, + password, + host, + self._val.port, + encode_host=False, + ), + path, + query_string, + fragment, + ) + ) + + +def _human_quote(s, unsafe): + if not s: + return s + for c in "%" + unsafe: + if c in s: + s = s.replace(c, f"%{ord(c):02X}") + if s.isprintable(): + return s + return "".join(c if c.isprintable() else quote(c) for c in s) + + +_MAXCACHE = 256 + + +@functools.lru_cache(_MAXCACHE) +def _idna_decode(raw): + try: + return idna.decode(raw.encode("ascii")) + except UnicodeError: # e.g. '::1' + return raw.encode("ascii").decode("idna") + + +@functools.lru_cache(_MAXCACHE) +def _idna_encode(host): + try: + return idna.encode(host, uts46=True).decode("ascii") + except UnicodeError: + return host.encode("idna").decode("ascii") + + +@rewrite_module +def cache_clear(): + _idna_decode.cache_clear() + _idna_encode.cache_clear() + + +@rewrite_module +def cache_info(): + return { + "idna_encode": _idna_encode.cache_info(), + "idna_decode": _idna_decode.cache_info(), + } + + +@rewrite_module +def cache_configure(*, idna_encode_size=_MAXCACHE, idna_decode_size=_MAXCACHE): + global _idna_decode, _idna_encode + + _idna_encode = functools.lru_cache(idna_encode_size)(_idna_encode.__wrapped__) + _idna_decode = functools.lru_cache(idna_decode_size)(_idna_decode.__wrapped__) diff --git a/venv/lib/python3.8/site-packages/yarl/py.typed b/venv/lib/python3.8/site-packages/yarl/py.typed new file mode 100644 index 0000000..dcf2c80 --- /dev/null +++ b/venv/lib/python3.8/site-packages/yarl/py.typed @@ -0,0 +1 @@ +# Placeholder diff --git a/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/LICENSE b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/LICENSE new file mode 120000 index 0000000..8aeefcb --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/LICENSE @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/db/3f/02/46b1f9278f15845b99fec478b8b506eb76487993722f8c6e254285faf8 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/METADATA new file mode 100644 index 0000000..5a68b45 --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/METADATA @@ -0,0 +1,100 @@ +Metadata-Version: 2.1 +Name: zipp +Version: 3.8.1 +Summary: Backport of pathlib-compatible object wrapper for zip files +Home-page: https://github.com/jaraco/zipp +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' +Requires-Dist: jaraco.itertools ; extra == 'testing' +Requires-Dist: func-timeout ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/zipp.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/zipp.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/zipp + +.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg + :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest +.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/zipp + :target: https://tidelift.com/subscription/pkg/pypi-zipp?utm_source=pypi-zipp&utm_medium=readme + + +A pathlib-compatible Zipfile object wrapper. Official backport of the standard library +`Path object `_. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - zipp + - stdlib + * - 3.5 + - 3.11 + * - 3.3 + - 3.9 + * - 1.0 + - 3.8 + + +Usage +===== + +Use ``zipp.Path`` in place of ``zipfile.Path`` on any Python. + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. 
diff --git a/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/RECORD new file mode 100644 index 0000000..1857d6b --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/zipp.cpython-38.pyc,, +zipp-3.8.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zipp-3.8.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +zipp-3.8.1.dist-info/METADATA,sha256=7HxlSDdTym_AHIlxFtmjkU_SB-cIzYQsaU7fexaPEJY,3473 +zipp-3.8.1.dist-info/RECORD,, +zipp-3.8.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +zipp-3.8.1.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5 +zipp.py,sha256=5hrx38_-kX0WCpfBk5LSwCIMYraThjaNj5HsriUSB-8,8029 diff --git a/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/WHEEL new file mode 120000 index 0000000..7e89bc1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/WHEEL @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/1b/5e/87/e00dc87a84269cead8578b9e6462928e18a95f1f3373c9eef451a5bcc0 \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/top_level.txt new file mode 120000 index 0000000..e54fc9c --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp-3.8.1.dist-info/top_level.txt @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/88/06/dd/a121df686a817d56f65ee47d26a4901c2a0eb0eb46eb2f42fcb4a9a85c \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/zipp.py b/venv/lib/python3.8/site-packages/zipp.py new file mode 120000 index 0000000..b141a22 --- /dev/null +++ b/venv/lib/python3.8/site-packages/zipp.py @@ -0,0 +1 @@ +/home/runner/.cache/pip/pool/e6/1a/f1/dfcffe917d160a97c19392d2c0220c62b69386368d8f91ecae251207ef \ No newline at end of file diff --git a/venv/lib64 b/venv/lib64 new file mode 120000 index 0000000..7951405 --- /dev/null +++ b/venv/lib64 @@ -0,0 +1 @@ +lib \ No newline at end of file diff --git a/venv/pyvenv.cfg b/venv/pyvenv.cfg new file mode 100644 index 0000000..e60a68c --- /dev/null +++ b/venv/pyvenv.cfg @@ -0,0 +1,3 @@ +home = /nix/store/2vm88xw7513h9pyjyafw32cps51b0ia1-python3-3.8.12/bin +include-system-site-packages = false +version = 3.8.12 diff --git a/venv/src/python-lsp-server b/venv/src/python-lsp-server new file mode 160000 index 0000000..7cad321 --- /dev/null +++ b/venv/src/python-lsp-server @@ -0,0 +1 @@ +Subproject commit 7cad32131ea95a1eaf30e5e5c67df265ddb702e3
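Note (not part of the patch above): the largest file in this hunk, yarl's _url.py, documents an immutable, pathlib-style URL object whose accessors and with_* methods are easiest to see in use. The sketch below is illustrative only; it assumes the yarl package installed in the venv shown in this diff is importable, and the example hostname and paths are made up. It exercises only the public behaviors described in the vendored docstrings (scheme/host/port fallback, query as a multidict, the / and % operators, with_query, join, human_repr).

# Minimal, hedged usage sketch of the yarl.URL API documented in the vendored _url.py.
from yarl import URL

base = URL("https://example.com/api/v1?format=json#top")

# Read-only accessors described in the _url.py docstrings.
print(base.scheme)       # 'https'
print(base.host)         # 'example.com'
print(base.port)         # 443 -- scheme-based fallback via DEFAULT_PORTS, no explicit port given
print(base.path)         # '/api/v1'
print(dict(base.query))  # {'format': 'json'} -- query is exposed as a MultiDictProxy
print(base.fragment)     # 'top'

# URL objects are immutable; operators and with_* methods return new instances.
users = base / "users"                 # appends a path segment; per __truediv__, query and fragment are cleared
paged = users % {"page": 2}            # shorthand for update_query()
replaced = users.with_query(limit=10)  # replaces the whole query part
joined = base.join(URL("/docs"))       # RFC 3986 join, like urllib.parse.urljoin

print(str(paged))          # 'https://example.com/api/v1/users?page=2'
print(str(replaced))       # 'https://example.com/api/v1/users?limit=10'
print(str(joined))         # 'https://example.com/docs'
print(joined.human_repr()) # decoded, human-readable form of the same URL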